diff --git a/pom.xml b/pom.xml
index a806efa..3a7d7a4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -30,6 +30,7 @@
         <url>http://maven.apache.org</url>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <kafka.version>2.4.0-hw-ei-312005</kafka.version>
     </properties>
@@ -163,6 +164,24 @@
             <version>0.2.7</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+            <version>${kafka.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>xml-apis</groupId>
+                    <artifactId>xml-apis</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>xml-apis</groupId>
+            <artifactId>xml-apis</artifactId>
+            <version>1.4.01</version>
+        </dependency>
+
         <dependency>
             <groupId>commons-beanutils</groupId>
@@ -209,7 +228,6 @@
             <version>1.4.5</version>
         </dependency>
-
         <dependency>
             <groupId>org.slf4j</groupId>
@@ -262,5 +280,53 @@
             <artifactId>spring-jms</artifactId>
             <version>3.2.8.RELEASE</version>
         </dependency>
+
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-streams</artifactId>
+            <version>${kafka.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.kafka</groupId>
+                    <artifactId>kafka-clients</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-api</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.kafka</groupId>
+                    <artifactId>connect-json</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
     </dependencies>
+
+    <repositories>
+        <repository>
+            <id>bigdata</id>
+            <url>http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/</url>
+        </repository>
+        <repository>
+            <id>huaweicloudsdk</id>
+            <url>https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/</url>
+            <releases>
+                <enabled>true</enabled>
+            </releases>
+            <snapshots>
+                <enabled>true</enabled>
+            </snapshots>
+        </repository>
+        <repository>
+            <id>central</id>
+            <name>Maven Central</name>
+            <url>https://repo1.maven.org/maven2/</url>
+        </repository>
+    </repositories>
 </project>
\ No newline at end of file
diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java
index bca4fde..b363729 100644
--- a/src/main/java/org/well/mysql/sink/WellSink.java
+++ b/src/main/java/org/well/mysql/sink/WellSink.java
@@ -54,8 +54,6 @@
         super.start();
         LOG.info("--------wellMysqlSink start-------");
         System.out.println("--------wellMysqlSink start-------");
-
-
     }
 
     @Override
@@ -125,7 +123,7 @@
 //        temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}";
         temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}";
-        temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}";
+//        temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}";
 //        temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}";
 //        temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}";
 //        temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}";
@@ -135,19 +133,19 @@
 //        temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}";
-//        temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}";
+//        temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}";
 //        temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}";
 //        temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}";
-//        temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}";
+//        temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}";
 //        temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}";
 //        temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}";
-//        temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}";
-        temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}";
-//        temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}";
+//        temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}";
+        temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}";
+//        temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}";
 
-//        temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}";
+//        temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}";
 
-//        temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}";
+//        temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}";
         AbstractResponse resp = ResponseResolver.makeResponse(temp);
         resp.setAc(ac);
         resp.process(temp);
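For reference, the only test payload left active in WellSink after this change is the WellOpenAlarm event string. The sketch below is illustration only and not part of the patch (the demo class name and the parsing calls are assumptions); it shows how that payload is structured when read with net.sf.json, which this project already depends on.

// Illustration only: not code from WellSink itself.
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;

public class WellEventPayloadDemo {
    public static void main(String[] args) {
        String temp = "{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\","
                + "\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},"
                + "\"ts\":1571298056241}";
        // Parse the envelope, then drill into mBody for the event details.
        JSONObject msg = JSONObject.fromObject(temp);
        JSONObject mBody = msg.getJSONObject("mBody");
        JSONArray eventType = mBody.getJSONArray("eventType");
        System.out.println(msg.getString("devCode") + " -> " + eventType.getString(0)
                + " @ " + mBody.getString("logTime"));
    }
}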
diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java
new file mode 100644
index 0000000..bd0cef4
--- /dev/null
+++ b/src/main/java/org/well/well/kafka/Producer.java
@@ -0,0 +1,44 @@
+package org.well.well.kafka;
+
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.well.well.kafka.util.KafkaProperties;
+import org.well.well.kafka.util.KafkaUtils;
+import org.well.well.kafka.util.LoginUtil;
+
+import java.io.IOException;
+import java.util.Properties;
+import java.util.concurrent.ExecutionException;
+
+public class Producer {
+
+    private static final Logger LOG = LoggerFactory.getLogger(Producer.class);
+    private static KafkaProducer<String, String> producer;
+
+    public static void send(String content, String topic) throws IOException {
+
+        LOG.debug("producer start.");
+        // !! NOTE: when security authentication is enabled, replace this with the machine-machine account you applied for.
+//        LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE);
+        if (LoginUtil.isSecurityModel()) {
+            LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE);
+        }
+        Properties props = KafkaUtils.producerInitProperties();
+//        System.out.println("-----" + props.toString());
+        // Publish the record.
+        producer = new KafkaProducer<String, String>(props);
+        ProducerRecord<String, String> record = new ProducerRecord<String, String>(topic, "", content);
+        try {
+            LOG.info("Sending to Kafka topic " + topic + ", payload: " + content);
+            // Synchronous send.
+            producer.send(record).get();
+        } catch (InterruptedException ie) {
+            LOG.info("The InterruptedException occurred : {}.", ie);
+        } catch (ExecutionException ee) {
+            LOG.info("The ExecutionException occurred : {}.", ee);
+        }
+        producer.close();
+    }
+}
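Producer.send() creates a new KafkaProducer, publishes one record synchronously, and closes the producer on every call. A minimal caller sketch, assuming a hypothetical topic name and payload (neither appears in this patch):

// Sketch only: the calling class, topic name and payload are assumptions.
import org.well.well.kafka.Producer;

import java.io.IOException;

public class ProducerSendDemo {
    public static void main(String[] args) throws IOException {
        String payload = "{\"DevID\":\"412019122104\",\"DevType\":\"Well\",\"LogTime\":\"20191017154056\"}";
        // send() blocks on producer.send(record).get() and then closes the producer,
        // so each call builds a fresh KafkaProducer from KafkaUtils.producerInitProperties().
        Producer.send(payload, "WELL_STANDARD_DATA");
    }
}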
diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java
new file mode 100644
index 0000000..d12cfc7
--- /dev/null
+++ b/src/main/java/org/well/well/kafka/StandardAlarm.java
@@ -0,0 +1,66 @@
+package org.well.well.kafka;
+
+public class StandardAlarm {
+
+    private String DevID;
+
+    private String DevType;
+
+    private String Provider;
+
+    private String Data;
+
+    private String LogTime;
+
+    public StandardAlarm(String DevID, String DevType, String Data, String LogTime) {
+        this.DevID = DevID;
+        this.DevType = DevType;
+        this.Provider = "Provider-ChangFeng";
+        this.Data = Data;
+        this.LogTime = LogTime;
+    }
+
+    public String getDevID() {
+        return DevID;
+    }
+
+    public void setDevID(String devID) {
+        DevID = devID;
+    }
+
+    public String getDevType() {
+        return DevType;
+    }
+
+    public void setDevType(String devType) {
+        DevType = devType;
+    }
+
+    public String getProvider() {
+        return Provider;
+    }
+
+    public void setProvider(String provider) {
+        Provider = provider;
+    }
+
+    public String getData() {
+        return Data;
+    }
+
+    public void setData(String data) {
+        Data = data;
+    }
+
+    public String getLogTime() {
+        return LogTime;
+    }
+
+    public void setLogTime(String logTime) {
+        LogTime = logTime;
+    }
+}
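StandardAlarm is a plain value holder whose Provider field is fixed to "Provider-ChangFeng" by the constructor. A small sketch (illustration only; the demo class and sample values are assumptions) of how an instance could be serialized with fastjson, which this patch already uses in StandardDataUtils:

// Sketch only: demo class and sample values are not part of the patch.
import com.alibaba.fastjson.JSON;
import org.well.well.kafka.StandardAlarm;

public class StandardAlarmDemo {
    public static void main(String[] args) {
        // Provider is set to "Provider-ChangFeng" inside the constructor.
        StandardAlarm alarm = new StandardAlarm("412019122104", "Well",
                "[{\"Key\":\"WellOpenAlarm\",\"Value\":\"1\"}]", "20191017154056");
        // fastjson serializes through the public getters.
        System.out.println(JSON.toJSONString(alarm));
    }
}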
diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java
new file mode 100644
index 0000000..9f33d81
--- /dev/null
+++ b/src/main/java/org/well/well/kafka/StandardData.java
@@ -0,0 +1,60 @@
+package org.well.well.kafka;
+
+public class StandardData {
+
+    private String DevID;
+    private String DevType;
+    private String Provider;
+    private String Status;
+    private String LogTime;
+
+    private String ProviderData;
+
+    public StandardData(String DevID, String DevType, String Status, String LogTime) {
+        this.DevID = DevID;
+        this.DevType = DevType;
+        this.Provider = "Provider-ChangFeng";
+        this.Status = Status;
+        this.LogTime = LogTime;
+    }
+
+    public String getDevID() {
+        return DevID;
+    }
+
+    public void setDevID(String devID) {
+        DevID = devID;
+    }
+
+    public String getDevType() {
+        return DevType;
+    }
+
+    public void setDevType(String devType) {
+        DevType = devType;
+    }
+
+    public String getProvider() {
+        return Provider;
+    }
+
+    public void setProvider(String provider) {
+        Provider = provider;
+    }
+
+    public String getStatus() {
+        return Status;
+    }
+
+    public void setStatus(String status) {
+        Status = status;
+    }
+
+    public String getLogTime() {
+        return LogTime;
+    }
+
+    public void setLogTime(String logTime) {
+        LogTime = logTime;
+    }
+}
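StandardData mirrors StandardAlarm but carries a Status string, which appears intended to hold a JSON array of Key/Value pairs in the format produced by StandardDataUtils at the end of this patch. A sketch under that assumption (the demo class and sample values are not from the patch):

// Sketch only: builds the Status list by hand in the same Key/Value shape StandardDataUtils emits.
import com.alibaba.fastjson.JSON;
import org.well.well.kafka.StandardData;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class StandardDataDemo {
    public static void main(String[] args) {
        List<Map<String, String>> status = new ArrayList<>();
        Map<String, String> level = new HashMap<>();
        level.put("Key", "Level");
        level.put("Value", "1.1892");
        status.add(level);
        // Status is stored as a JSON string, not as a nested object.
        StandardData data = new StandardData("11201900001", "Liquid",
                JSON.toJSONString(status), "20191213000000");
        System.out.println(data.getStatus());
    }
}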
String DevType, String Data, String LogTime) {
+        this.DevID = DevID;
+        this.DevType = DevType;
+        this.Provider = "Provider-ChangFeng";
+        this.Data = Data;
+        this.LogTime = LogTime;
+    }
+
+    public String getDevID() {
+        return DevID;
+    }
+
+    public void setDevID(String devID) {
+        DevID = devID;
+    }
+
+    public String getDevType() {
+        return DevType;
+    }
+
+    public void setDevType(String devType) {
+        DevType = devType;
+    }
+
+    public String getProvider() {
+        return Provider;
+    }
+
+    public void setProvider(String provider) {
+        Provider = provider;
+    }
+
+    public String getData() {
+        return Data;
+    }
+
+    public void setData(String data) {
+        Data = data;
+    }
+
+    public String getLogTime() {
+        return LogTime;
+    }
+
+    public void setLogTime(String logTime) {
+        LogTime = logTime;
+    }
+}
diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java
new file mode 100644
index 0000000..9f33d81
--- /dev/null
+++ b/src/main/java/org/well/well/kafka/StandardData.java
@@ -0,0 +1,60 @@
+package org.well.well.kafka;
+
+public class StandardData {
+
+    private String DevID;
+    private String DevType;
+    private String Provider;
+    private String Status;
+    private String LogTime;
+
+    private String ProviderData;
+
+    public StandardData(String DevID, String DevType, String Status, String LogTime) {
+        this.DevID = DevID;
+        this.DevType = DevType;
+        this.Provider = "Provider-ChangFeng";
+        this.Status = Status;
+        this.LogTime = LogTime;
+    }
+
+    public String getDevID() {
+        return DevID;
+    }
+
+    public void setDevID(String devID) {
+        DevID = devID;
+    }
+
+    public String getDevType() {
+        return DevType;
+    }
+
+    public void setDevType(String devType) {
+        DevType = devType;
+    }
+
+    public String getProvider() {
+        return Provider;
+    }
+
+    public void setProvider(String provider) {
+        Provider = provider;
+    }
+
+    public String getStatus() {
+        return Status;
+    }
+
+    public void setStatus(String status) {
+        Status = status;
+    }
+
+    public String getLogTime() {
+        return LogTime;
+    }
+
+    public void setLogTime(String logTime) {
+        LogTime = logTime;
+    }
+}
diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java
new file mode 100644
index 0000000..dba65a8
--- /dev/null
+++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java
@@ -0,0 +1,60 @@
+package org.well.well.kafka;
+
+import com.alibaba.fastjson.JSON;
+import net.sf.json.JSONObject;
+import org.springframework.util.CollectionUtils;
+
+import java.text.SimpleDateFormat;
+import java.util.*;
+
+public class StandardDataUtils {
+
+    // Builds the standard "Status" payload: maps each device field in realParam onto the
+    // corresponding standard key in standardkeyParm, appends any extra key/value pairs,
+    // and serializes the result as a JSON array of {"Key": ..., "Value": ...} entries.
+    public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List<Map<String, String>> appenList) {
+        List<Map<String, String>> standardStatusFomateList = new ArrayList<>();
+        if (realParam != null) {
+            for (int i = 0; i < realParam.length; i++) {
+                Map<String, String> standardStatusMap = new HashMap<>();
+                standardStatusMap.put("Key", standardkeyParm[i]);
+                standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString());
+                standardStatusFomateList.add(standardStatusMap);
+            }
+        }
+        if (!CollectionUtils.isEmpty(appenList)) {
+            standardStatusFomateList.addAll(appenList);
+        }
+        return JSON.toJSONString(standardStatusFomateList);
+    }
+
+    // Wraps a single key/value pair in the list structure expected by standardStatusFomate.
+    public static List<Map<String, String>> appendListBuilder(String value, String key) {
+        List<Map<String, String>> standardkeyParmList = new ArrayList<>();
+        Map<String, String> standardkeyParm = new HashMap<>();
+        standardkeyParm.put("Key", key);
+        standardkeyParm.put("Value", value);
+        standardkeyParmList.add(standardkeyParm);
+        return standardkeyParmList;
+    }
+
+    // Builds the human-readable alarm text:
+    // "At <timestamp>, device <devcode> raised <alarmContent>, value <value>".
+    public static String alarmDataBuilder(String devcode, String value, String alarmContent) {
+        String standardMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + ","
+                + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value;
+        return standardMsg;
+    }
+
+//    public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List<Map<String, String>> appenList) {
+//        List<Map<String, String>> standardStatusFomateList = new ArrayList<>();
+//        for (int i = 0; i < realParam.length; i++) {
+//            Map<String, String> standardStatusMap = new HashMap<>();
+//            standardStatusMap.put("Key", standardkeyParm[i]);
+//            standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString());
+//            standardStatusFomateList.add(standardStatusMap);
+//        }
+//        if (!CollectionUtils.isEmpty(appenList)) {
+//            standardStatusFomateList.addAll(appenList);
+//        }
+//        return JSON.toJSONString(standardStatusFomateList);
+//    }
+
+}
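Taken together, Producer, StandardData and StandardDataUtils standardize a parsed device message and publish it to Kafka. The response classes that actually call these helpers are not part of this diff, so the following is only a sketch of the intended wiring; the device code, field names ("level", "cell") and standard keys used below are invented for illustration, and the "kafkaDataFlag" marker matches the KAFKA_DATA_FLAG constant introduced further down.

package org.well.well.kafka;

import com.alibaba.fastjson.JSON;
import net.sf.json.JSONObject;
import org.well.well.kafka.util.KafkaProperties;

import java.util.List;
import java.util.Map;

// Illustrative only: one plausible way the new classes fit together.
public class StandardDataSendExample {

    public static void main(String[] args) throws Exception {
        // Hypothetical liquid-level payload; in the real flow these values come from the parsed device message.
        JSONObject body = JSONObject.fromObject("{\"level\":1.18,\"cell\":9.9}");

        // Map the device fields onto standard keys and append the kafkaDataFlag marker.
        List<Map<String, String>> append =
                StandardDataUtils.appendListBuilder("1", "kafkaDataFlag");
        String status = StandardDataUtils.standardStatusFomate(
                body,
                new String[]{"level", "cell"},
                new String[]{"Level", "Power"},
                append);

        // Wrap the Status string in the standard envelope and publish it to the data topic.
        StandardData data = new StandardData("11201900001", "LiquidLevel", status, "20191213000000");
        Producer.send(JSON.toJSONString(data), KafkaProperties.DATA_TOPIC);
    }
}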
diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java
new file mode 100644
index 0000000..ec1912b
--- /dev/null
+++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java
@@ -0,0 +1,17 @@
+package org.well.well.kafka.constant;
+
+public class DeviceTypeConstant {
+
+    public static final String LIQUID="LiquidLevel";
+    public static final String LOCATION="Location";
+    public static final String METHANE="Gas";
+
+    public static final String WASTE_GAS="HarmfulGas";
+    public static final String TEMP_HUMI="AirTempHumi";
+    public static final String WELL="ManholeCover";
+    public static final String NOISE_DIG="DigNoise";
+    public static final String WATER_QUALITY="WaterQuality";
+
+    public final static String KAFKA_DATA_FLAG = "kafkaDataFlag";
+
+}
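No devType-to-standard-name mapping is included in this diff, so the hypothetical helper below only illustrates how these constants relate to the devType values seen in the incoming messages (for example "Liquid", "Well", "Methane" in the WellSink test strings); the exact pairings are an assumption.

package org.well.well.kafka.constant;

// Hypothetical mapping helper, not part of the committed change.
public final class DeviceTypeMappingExample {

    private DeviceTypeMappingExample() {
    }

    public static String toStandardDevType(String devType) {
        switch (devType) {
            case "Liquid":       return DeviceTypeConstant.LIQUID;        // "LiquidLevel"
            case "Locator":      return DeviceTypeConstant.LOCATION;      // "Location"
            case "Methane":      return DeviceTypeConstant.METHANE;       // "Gas"
            case "WasteGas":     return DeviceTypeConstant.WASTE_GAS;     // "HarmfulGas"
            case "TempHumi":     return DeviceTypeConstant.TEMP_HUMI;     // "AirTempHumi"
            case "Well":         return DeviceTypeConstant.WELL;          // "ManholeCover"
            case "NoiseDig":     return DeviceTypeConstant.NOISE_DIG;     // "DigNoise"
            case "WaterQuality": return DeviceTypeConstant.WATER_QUALITY; // "WaterQuality"
            default:             return devType;                          // pass unknown types through unchanged
        }
    }
}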
diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java
new file mode 100644
index 0000000..50e807d
--- /dev/null
+++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java
@@ -0,0 +1,8 @@
+package org.well.well.kafka.constant;
+
+public class TopicConstant {
+
+    public static final String DATA_TOPIC="dataTopic";
+    public static final String ALARM_TOPIC="alarmTopic";
+
+}
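TopicConstant holds the property keys "dataTopic" and "alarmTopic" rather than broker topic names, while the KafkaProperties class added below hard-codes the actual topics TEMPSTORE_8204 and MSGQUEUE_8287. The diff does not show how the two are reconciled; one plausible wiring, sketched here under that assumption, resolves the key through KafkaProperties.getValues() and falls back to the hard-coded default.

package org.well.well.kafka.constant;

import org.well.well.kafka.util.KafkaProperties;

// Sketch only: topic resolution is not part of this diff.
public final class TopicResolver {

    private TopicResolver() {
    }

    public static String dataTopic() {
        return KafkaProperties.getInstance()
                .getValues(TopicConstant.DATA_TOPIC, KafkaProperties.DATA_TOPIC);
    }

    public static String alarmTopic() {
        return KafkaProperties.getInstance()
                .getValues(TopicConstant.ALARM_TOPIC, KafkaProperties.ALARM_TOPIC);
    }
}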
b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + 
temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } 
+        Properties props = KafkaUtils.producerInitProperties();
+//        System.out.println("-----"+props.toString());
+        // publish
+        producer = new KafkaProducer(props);
+        ProducerRecord record = new ProducerRecord(topic, "", content);
+        try {
+            LOG.info("Sending Kafka message to topic " + topic + ", content-------" + content);
+            // synchronous send
+            producer.send(record).get();
+        } catch (InterruptedException ie) {
+            LOG.info("The InterruptedException occurred : {}.", ie);
+        } catch (ExecutionException ee) {
+            LOG.info("The ExecutionException occurred : {}.", ee);
+        }
+        producer.close();
+    }
+}
diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java
new file mode 100644
index 0000000..d12cfc7
--- /dev/null
+++ b/src/main/java/org/well/well/kafka/StandardAlarm.java
@@ -0,0 +1,66 @@
+package org.well.well.kafka;
+
+public class StandardAlarm {
+
+    private String DevID;
+
+    private String DevType;
+
+    private String Provider;
+
+    private String Data;
+
+    private String LogTime;
+
+    public StandardAlarm(String DevID, String DevType, String Data, String LogTime) {
+        this.DevID = DevID;
+        this.DevType = DevType;
+        this.Provider = "Provider-ChangFeng";
+        this.Data = Data;
+        this.LogTime = LogTime;
+    }
+
+
+    public String getDevID() {
+        return DevID;
+    }
+
+    public void setDevID(String devID) {
+        DevID = devID;
+    }
+
+    public String getDevType() {
+        return DevType;
+    }
+
+    public void setDevType(String devType) {
+        DevType = devType;
+    }
+
+    public String getProvider() {
+        return Provider;
+    }
+
+    public void setProvider(String provider) {
+        Provider = provider;
+    }
+
+    public String getData() {
+        return Data;
+    }
+
+    public void setData(String data) {
+        Data = data;
+    }
+
+    public String getLogTime() {
+        return LogTime;
+    }
+
+    public void setLogTime(String logTime) {
+        LogTime = logTime;
+    }
+
+
+}
+
diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java
new file mode 100644
index 0000000..9f33d81
--- /dev/null
+++ b/src/main/java/org/well/well/kafka/StandardData.java
@@ -0,0 +1,60 @@
+package org.well.well.kafka;
+
+public class StandardData {
+
+    private String DevID;
+    private String DevType;
+    private String Provider;
+    private String Status;
+    private String LogTime;
+
+    private String ProviderData;
+
+    public StandardData(String DevID, String DevType, String Status, String LogTime) {
+        this.DevID = DevID;
+        this.DevType = DevType;
+        this.Provider = "Provider-ChangFeng";
+        this.Status = Status;
+        this.LogTime = LogTime;
+    }
+
+    public String getDevID() {
+        return DevID;
+    }
+
+    public void setDevID(String devID) {
+        DevID = devID;
+    }
+
+    public String getDevType() {
+        return DevType;
+    }
+
+    public void setDevType(String devType) {
+        DevType = devType;
+    }
+
+    public String getProvider() {
+        return Provider;
+    }
+
+    public void setProvider(String provider) {
+        Provider = provider;
+    }
+
+    public String getStatus() {
+        return Status;
+    }
+
+    public void setStatus(String status) {
+        Status = status;
+    }
+
+    public String getLogTime() {
+        return LogTime;
+    }
+
+    public void setLogTime(String logTime) {
+        LogTime = logTime;
+    }
+}
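A hypothetical wiring sketch, not taken from this diff, of how StandardDataUtils, StandardData, DeviceTypeConstant and Producer are intended to fit together; the device code, field names and sample JSON body are invented for illustration, and the actual response handlers that do this mapping are outside this section.

// Hypothetical sketch: build a standardized liquid-level payload and publish it
// to the data topic defined in KafkaProperties.
import com.alibaba.fastjson.JSON;
import net.sf.json.JSONObject;
import org.well.well.kafka.Producer;
import org.well.well.kafka.StandardData;
import org.well.well.kafka.StandardDataUtils;
import org.well.well.kafka.constant.DeviceTypeConstant;
import org.well.well.kafka.util.KafkaProperties;

import java.io.IOException;
import java.util.List;
import java.util.Map;

public class StandardDataPublishSketch {
    public static void main(String[] args) throws IOException {
        JSONObject body = JSONObject.fromObject("{\"level\":1.18,\"uptime\":\"20191213000000\"}");
        // Map the raw field names onto the standard Key/Value list and append a battery entry.
        List<Map<String, String>> append = StandardDataUtils.appendListBuilder("99", "Power");
        String status = StandardDataUtils.standardStatusFomate(
                body, new String[]{"level"}, new String[]{"Level"}, append);
        StandardData data = new StandardData("11201900001",
                DeviceTypeConstant.LIQUID, status, "20191213000000");
        Producer.send(JSON.toJSONString(data), KafkaProperties.DATA_TOPIC);
    }
}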
diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java
new file mode 100644
index 0000000..2951fe3
--- /dev/null
+++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java
@@ -0,0 +1,136 @@
+package org.well.well.kafka.util;
+
+
+import java.util.Properties;
+
+public class KafkaUtils {
+
+
+    // Maximum wait time for a single request (ms)
+    private final int waitTime = 1000;
+
+    // Broker connection address
+
+    // Broker connection address
+    private final static String BOOTSTRAP_SERVER = "bootstrap.servers";
+
+    // Group id
+    private final static String GROUP_ID = "group.id";
+
+    // Deserializer class for the message value
+    private final static String VALUE_DESERIALIZER = "value.deserializer";
+
+    // Deserializer class for the message key
+    private final static String KEY_DESERIALIZER = "key.deserializer";
+
+    // Protocol type: currently SASL_PLAINTEXT or PLAINTEXT
+    private final static String SECURITY_PROTOCOL = "security.protocol";
+
+    // Service name
+    private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name";
+
+    // Domain name
+    private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name";
+
+    // Whether to commit offsets automatically
+    private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit";
+
+    // Interval for automatic offset commits
+    private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms";
+
+    // Session timeout
+    private final static String SESSION_TIMEOUT_MS = "session.timeout.ms";
+
+    // Client ID
+    private final static String CLIENT_ID = "client.id";
+
+    // Key serializer class
+    private final static String KEY_SERIALIZER = "key.serializer";
+
+    // Value serializer class
+    private final static String VALUE_SERIALIZER = "value.serializer";
+
+
+    // Partitioner class name
+    private final static String PARTITIONER_NAME = "partitioner.class";
+
+    // Send 100 messages by default
+    private final static int MESSAGE_NUM = 100;
+
+
+    /**
+     * Keytab file name of the machine-machine account requested by the user
+     */
+    private static final String USER_KEYTAB_FILE = "user.keytab";
+
+    /**
+     * Name of the machine-machine account requested by the user
+     */
+    private static final String USER_PRINCIPAL = "kafkauser";
+
+
+    public static Properties consumerInitProperties() {
+        Properties props = new Properties();
+        KafkaProperties kafkaProc = KafkaProperties.getInstance();
+
+        // Broker connection address
+        props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007"));
+        // Group id
+        props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer"));
+        // Whether to commit offsets automatically
+        props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true"));
+        // Interval for automatic offset commits
+        props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000"));
+        // Session timeout
+        props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000"));
+        // Deserializer class for the message key
+        props.put(KEY_DESERIALIZER,
+                kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer"));
+        // Deserializer class for the message value
+        props.put(VALUE_DESERIALIZER,
+                kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer"));
+        // Security protocol type
+        props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT"));
+
+//        props.put(SASL_MECHANISM, "GSSAPI");
+        // Service name
+        props.put(SASL_KERBEROS_SERVICE_NAME, "kafka");
+        // Domain name
+        props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com"));
+
+//        System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf");
+
+        return props;
+    }
+
+    public static Properties producerInitProperties() {
+        Properties props = new Properties();
+        KafkaProperties kafkaProc = KafkaProperties.getInstance();
+
+        // Broker address list
+        props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007"));
+        // Client ID
+        props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer"));
+        // Key serializer class
+        props.put(KEY_SERIALIZER,
+                kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer"));
+        // Value serializer class
+        props.put(VALUE_SERIALIZER,
+                kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer"));
+        // Protocol type: currently SASL_PLAINTEXT or PLAINTEXT
+        props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT"));
+        // Service name
+        props.put(SASL_KERBEROS_SERVICE_NAME, "kafka");
+        // Domain name
+        props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com"));
+        // Partitioner class name
+        props.put(PARTITIONER_NAME,
+                kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner"));
+
+        System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf");
+
+        return props;
+    }
+
+
+}
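Only a producer is added in this change set; for reference, a minimal consumer sketch (an assumption, not part of the diff) showing how the consumerInitProperties() above could be used. In security mode the same LoginUtil.securityPrepare call made in Producer.send would presumably be needed before creating the consumer.

// Hypothetical consumer sketch: poll the data topic using the consumer
// properties assembled by KafkaUtils.
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.well.well.kafka.util.KafkaProperties;
import org.well.well.kafka.util.KafkaUtils;

import java.time.Duration;
import java.util.Collections;

public class ConsumerSketch {
    public static void main(String[] args) {
        KafkaConsumer<String, String> consumer =
                new KafkaConsumer<>(KafkaUtils.consumerInitProperties());
        consumer.subscribe(Collections.singletonList(KafkaProperties.DATA_TOPIC));
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.value());
            }
        }
    }
}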
temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// 
temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = 
Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final 
String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + 
rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, 
"org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+     */
+    public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal";
+
+    /**
+     * java security krb5 file path
+     */
+    public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf";
+
+    /**
+     * java security login file path
+     */
+    public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config";
+
+    /**
+     * Set up the jaas.conf file
+     *
+     * @param principal
+     * @param keytabPath
+     * @throws IOException
+     */
+    public static void setJaasFile(String principal, String keytabPath)
+            throws IOException {
+//        String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator;
+        String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient";
+        String jaasPath = filePath + JAAS_POSTFIX;
+
+        // Escape path separators on Windows
+        jaasPath = jaasPath.replace("\\", "\\\\");
+        // Delete any existing jaas file
+        deleteJaasFile(jaasPath);
+        writeJaasFile(jaasPath, principal, keytabPath);
+        System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath);
+    }
+
+    /**
+     * Set the ZooKeeper server principal
+     *
+     * @param zkServerPrincipal
+     * @throws IOException
+     */
+    public static void setZookeeperServerPrincipal(String zkServerPrincipal)
+            throws IOException {
+        System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal);
+        String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL);
+        if (ret == null)
+        {
+            throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null.");
+        }
+        if (!ret.equals(zkServerPrincipal))
+        {
+            throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + ".");
+        }
+    }
+
+    /**
+     * Set the krb5 config file
+     *
+     * @param krb5ConfFile
+     * @throws IOException
+     */
+    public static void setKrb5Config(String krb5ConfFile)
+            throws IOException {
+        System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile);
+        String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF);
+        if (ret == null)
+        {
+            throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null.");
+        }
+        if (!ret.equals(krb5ConfFile))
+        {
+            throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + ".");
+        }
+    }
+
+    /**
+     * Write the jaas file
+     *
+     * @throws IOException
+     *             if writing the file fails
+     */
+    private static void writeJaasFile(String jaasPath, String principal, String keytabPath)
+            throws IOException {
+        FileWriter writer = new FileWriter(new File(jaasPath));
+        try
+        {
+            writer.write(getJaasConfContext(principal, keytabPath));
+            writer.flush();
+        }
+        catch (IOException e)
+        {
+            throw new IOException("Failed to create jaas.conf File");
+        }
+        finally
+        {
+            writer.close();
+        }
+    }
+
+    private static void deleteJaasFile(String jaasPath)
+            throws IOException {
+        File jaasFile = new File(jaasPath);
+        if (jaasFile.exists())
+        {
+            if (!jaasFile.delete())
+            {
+                throw new IOException("Failed to delete exists jaas file.");
+            }
+        }
+    }
+
+    private static String getJaasConfContext(String principal, String keytabPath) {
+        Module[] allModule = Module.values();
+        StringBuilder builder = new StringBuilder();
+        for (Module modlue : allModule)
+        {
+            builder.append(getModuleContext(principal, keytabPath, modlue));
+        }
+        return builder.toString();
+    }
+
+    private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) {
+        StringBuilder builder = new StringBuilder();
+        if (IS_IBM_JDK) {
+            builder.append(module.getName()).append(" {").append(LINE_SEPARATOR);
+            builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR);
+            builder.append("credsType=both").append(LINE_SEPARATOR);
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occurred : {}.", e); + } + + return isSecurity; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +}
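The security bootstrap above is driven by a plain properties file named kafkaSecurityMode in the configured directory: isSecurityModel() returns true only when that file exists and contains kafka.client.security.mode=yes. A minimal caller sketch follows; SecurityBootstrapDemo is a hypothetical class (not part of this patch), and it assumes krb5.conf, the keytab and kafkaSecurityMode are present under the hard-coded config path used by LoginUtil and KafkaProperties.

package org.well.well.kafka.util;

import java.io.IOException;

// Hypothetical demo showing the intended call order for the Kerberos bootstrap.
public class SecurityBootstrapDemo {
    public static void main(String[] args) throws IOException {
        // kafkaSecurityMode is expected to contain: kafka.client.security.mode=yes
        if (LoginUtil.isSecurityModel()) {
            // Writes the JAAS file and sets java.security.krb5.conf,
            // java.security.auth.login.config and zookeeper.server.principal.
            LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE);
        }
    }
}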
temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// 
temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class 
Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public 
String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 
0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = 
producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// 
System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + 
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occurred : {}.", e); + } + + return isSecurity; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +}
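Taken together, the pieces above are meant to be composed as: StandardDataUtils builds the Status array, StandardData wraps it with device metadata, and Producer publishes the serialized record using the properties assembled by KafkaUtils/KafkaProperties. A rough end-to-end sketch, assuming fastjson's JSON.toJSONString for the envelope (the same library StandardDataUtils already uses) and illustrative device values; WellDataDemo is a hypothetical class, not part of this patch.

package org.well.well.kafka;

import com.alibaba.fastjson.JSON;
import net.sf.json.JSONObject;
import org.well.well.kafka.constant.DeviceTypeConstant;
import org.well.well.kafka.util.KafkaProperties;

import java.io.IOException;

// Hypothetical demo wiring StandardDataUtils, StandardData and Producer together.
public class WellDataDemo {
    public static void main(String[] args) throws IOException {
        // Raw reading as parsed elsewhere in the pipeline (values are made up).
        JSONObject reading = JSONObject.fromObject("{\"level\":1.19,\"cell\":9.9,\"uptime\":\"20191213000000\"}");

        // Map raw keys to standard keys and append the battery cell value as an extra entry.
        String status = StandardDataUtils.standardStatusFomate(reading,
                new String[]{"level"}, new String[]{"Level"},
                StandardDataUtils.appendListBuilder(reading.get("cell").toString(), "Cell"));

        // Wrap with device metadata and publish to the data topic.
        StandardData data = new StandardData("11201900001", DeviceTypeConstant.LIQUID, status, "20191213000000");
        Producer.send(JSON.toJSONString(data), KafkaProperties.DATA_TOPIC);
    }
}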
temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// 
temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// 
temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; 
+ } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new 
HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + 
consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = 
"user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + 
this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. + */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new 
IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ?
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device,
diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"};
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? 
DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 }
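The LiquidManager and LocatorManager hunks above share one publishing pattern: build a standardized Status list, wrap it in a StandardData envelope, and hand the JSON to Producer.send(...) only when a topic is configured. For reference, that path can be exercised on its own roughly as below. This is an illustrative sketch, not part of the patch: the device code, reading values and class name are made up, and it assumes StandardData, StandardDataUtils, Producer, Configure and the constant classes behave as they are declared elsewhere in this change.

import com.alibaba.fastjson.JSON;
import net.sf.json.JSONObject;
import org.well.well.kafka.Producer;
import org.well.well.kafka.StandardData;
import org.well.well.kafka.StandardDataUtils;
import org.well.well.kafka.constant.DeviceTypeConstant;
import org.well.well.kafka.constant.TopicConstant;
import org.well.well.util.Configure;

// Illustrative sketch only; not part of the patch.
public class LiquidPublishSketch {
    public static void main(String[] args) throws Exception {
        // One entry of the "datas" array from a LiquidData message (values are made up).
        JSONObject reading = JSONObject.fromObject("{\"level\":1.19,\"uptime\":\"20191213000000\"}");

        // Map the device field "level" onto the standardized key "Level" and append
        // the battery reading as an extra "Power" pair, as processNormalData does.
        String status = StandardDataUtils.standardStatusFomate(
                reading,
                new String[]{"level"},
                new String[]{"Level"},
                StandardDataUtils.appendListBuilder("9.9", "Power"));

        // Wrap the status in a StandardData envelope and publish it to the configured
        // data topic; nothing is sent when the topic is not configured.
        StandardData data = new StandardData("11201900001", DeviceTypeConstant.LIQUID, status, "20191213000000");
        String topic = Configure.getProperty(TopicConstant.DATA_TOPIC, "");
        if (topic != null && !topic.isEmpty()) {
            Producer.send(JSON.toJSONString(data), topic);
        }
    }
}

Alarms take the same route through StandardAlarm, StandardDataUtils.alarmDataBuilder(...) and TopicConstant.ALARM_TOPIC.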
temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// 
temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } 
+ Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import 
org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + 
props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + 
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警
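Note on the changes above: the manager-level hunks (LiquidManager, LocatorManager, MeterManager) all follow the same publishing pattern introduced by this patch: format the raw reading into a Key/Value "Status" string with StandardDataUtils, wrap it in a StandardData or StandardAlarm envelope, resolve the target topic from Configure, and push it through Producer.send. The sketch below shows that flow in one place. It is illustrative only: the class and package name are invented for the example, and the generic type arguments (List<Map<String, String>>) are reconstructed here because the diff rendering stripped them from the committed sources.

package org.well.well.kafka.example; // hypothetical package, not part of the patch

import com.alibaba.fastjson.JSON;
import net.sf.json.JSONObject;
import org.well.well.kafka.Producer;
import org.well.well.kafka.StandardAlarm;
import org.well.well.kafka.StandardData;
import org.well.well.kafka.StandardDataUtils;
import org.well.well.kafka.constant.DeviceTypeConstant;
import org.well.well.kafka.constant.TopicConstant;
import org.well.well.util.Configure;

import java.io.IOException;
import java.util.List;
import java.util.Map;

// Hypothetical usage sketch of the helpers added in this patch; generic types are assumptions.
public class StandardKafkaPublishExample {

    // Publish one liquid-level sample the way LiquidManager.processNormalData does.
    public static void publishLiquidSample(JSONObject sample, String devCode, String cell) throws IOException {
        // Extra Key/Value entries appended to every status payload (battery level reported as "Power").
        List<Map<String, String>> appendList = StandardDataUtils.appendListBuilder(cell, "Power");
        // Map the device-side field names to the standardized keys expected downstream.
        String[] realParam = new String[]{"level"};
        String[] standardParam = new String[]{"Level"};
        // Builds a JSON array such as [{"Key":"Level","Value":"6.73"},{"Key":"Power","Value":"98"}].
        String status = StandardDataUtils.standardStatusFomate(sample, realParam, standardParam, appendList);
        StandardData data = new StandardData(devCode, DeviceTypeConstant.LIQUID, status,
                sample.get("uptime").toString());
        // Only publish when a data topic has been configured.
        String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, "");
        if (dataTopic != null && !dataTopic.isEmpty()) {
            Producer.send(JSON.toJSONString(data), dataTopic);
        }
    }

    // Publish an alarm the way the LiquidManager and MeterManager hunks do.
    public static void publishAlarm(String devCode, String value, String alarmContent, String logTime) throws IOException {
        // alarmDataBuilder renders the human-readable alarm text used in the managers above.
        String msg = StandardDataUtils.alarmDataBuilder(devCode, value, alarmContent);
        StandardAlarm alarm = new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, msg, logTime);
        String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, "");
        if (alarmTopic != null && !alarmTopic.isEmpty()) {
            Producer.send(JSON.toJSONString(alarm), alarmTopic);
        }
    }
}

As committed, Producer.send builds a fresh KafkaProducer for every call, sends synchronously, and closes the producer immediately, so this path is acceptable for low-volume telemetry but would be a natural place to reuse a single producer instance if message rates grow.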
temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// 
temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = 
Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final 
String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + 
rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, 
"org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + 
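// For reference (illustrative, not part of the patch): nothing in this change consumes from Kafka,
// but the consumer properties assembled by KafkaUtils.consumerInitProperties() above would be
// used along these lines, with the topic name taken from KafkaProperties:
//   KafkaConsumer<String, String> consumer = new KafkaConsumer<>(KafkaUtils.consumerInitProperties());
//   consumer.subscribe(Collections.singletonList(KafkaProperties.DATA_TOPIC));
//   while (true) {
//       for (ConsumerRecord<String, String> record : consumer.poll(Duration.ofMillis(1000))) {
//           System.out.println(record.topic() + "/" + record.partition() + ": " + record.value());
//       }
//   }
// (org.apache.kafka.clients.consumer.* and java.time.Duration imports assumed; in security mode
// LoginUtil.securityPrepare(...) has to run first, exactly as Producer.send() does.)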
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
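// Illustrative only, not part of the patch: when the Sun/Oracle JDK branch of getModuleContext is
// taken, securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE) rewrites
// kafkaClient.jaas.conf with StormClient, KafkaClient and Client sections; the KafkaClient section
// comes out roughly as follows (principal and keytab path depend on the account and config
// directory actually used):
//
//   KafkaClient {
//   com.sun.security.auth.module.Krb5LoginModule required
//   useKeyTab=true
//   keyTab="<config dir>\\user.keytab"
//   principal="kafkauser@HADOOP.COM"
//   useTicketCache=false
//   storeKey=true
//   debug=true;
//   };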
+ if (!isFileExists(krbFilePath)) {
+ return isSecurity;
+ }
+ try {
+ securityProps.load(new FileInputStream(krbFilePath));
+
+ if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode")))
+ {
+ isSecurity = true;
+ }
+ } catch (Exception e) {
+ LOG.info("The Exception occurred : {}.", e);
+ }
+
+ return isSecurity;
+ }
+
+ /*
+ * Check whether the file exists
+ */
+ private static boolean isFileExists(String fileName) {
+ File file = new File(fileName);
+
+ return file.exists();
+ }
+}
diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java
new file mode 100644
index 0000000..bb4ef71
--- /dev/null
+++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java
@@ -0,0 +1,36 @@
+package org.well.well.kafka.util;
+
+import org.apache.kafka.clients.producer.Partitioner;
+import org.apache.kafka.common.Cluster;
+
+import java.util.Map;
+
+public class SimplePartitioner implements Partitioner {
+
+ @Override
+ public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) {
+ int partition = 0;
+ String partitionKey = (String) key;
+ int numPartitions = cluster.partitionsForTopic(topic).size();
+
+ try {
+ // Partitioning logic: the record key is expected to be numeric
+ partition = Integer.parseInt(partitionKey) % numPartitions;
+ } catch (NumberFormatException ne) {
+ // If the key cannot be parsed, fall back to partition 0
+ partition = 0;
+ }
+
+ return partition;
+ }
+
+ @Override
+ public void close() {
+
+ }
+
+ @Override
+ public void configure(Map<String, ?> map) {
+
+ }
+}
diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java
index c421e02..e8cd95c 100644
--- a/src/main/java/org/well/well/manager/LiquidManager.java
+++ b/src/main/java/org/well/well/manager/LiquidManager.java
@@ -1,5 +1,6 @@
 package org.well.well.manager;
+import com.alibaba.fastjson.JSON;
 import net.sf.json.JSONArray;
 import net.sf.json.JSONObject;
 import org.slf4j.Logger;
@@ -13,6 +14,13 @@
 import org.well.well.core.util.ThreadUtil;
 import org.well.well.domain.*;
 import org.well.well.dto.LiquidDTO;
+import org.well.well.kafka.Producer;
+import org.well.well.kafka.StandardAlarm;
+import org.well.well.kafka.StandardData;
+import org.well.well.kafka.StandardDataUtils;
+import org.well.well.kafka.constant.DeviceTypeConstant;
+import org.well.well.kafka.constant.TopicConstant;
+import org.well.well.util.Configure;
 import javax.annotation.Resource;
 import java.io.IOException;
@@ -63,6 +71,8 @@
 return resultMap;
 }
+
+
 public void processNormalData(JSONObject jsonObject, String devCode) {
 String cell = jsonObject.get("cell").toString();
 String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():"";
@@ -70,13 +80,15 @@
 String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():"";
 JSONArray jsonArray = (JSONArray) jsonObject.get("datas");
 BusWell busWell = busWellManager.getWellByDevCode(devCode);
+ List<Map<String, String>> appendList = StandardDataUtils.appendListBuilder(cell,"Power");
+ String[] realParam = new String[]{"level"};
+ String[] standardParam = new String[]{"Level"};
 for (int i = 0; i < jsonArray.size(); i++) {
 try {
 String level = ((JSONObject) jsonArray.get(i)).get("level").toString();
 level = String.format("%.2f", Double.valueOf(level));
 String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString();
-
 //1.清离线
 deviceManager.clearOffline(devCode);
 //2.清除设备告警
@@ -85,6 +97,13 @@
 LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ?
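// Usage note (illustrative, not part of the patch): SimplePartitioner, registered above through
// partitioner.class, expects a numeric record key. With, say, 3 partitions on the topic:
//   key "7"   -> 7 % 3 = partition 1
//   key "abc" -> NumberFormatException -> partition 0
// Producer.send(...) in this change publishes every record with an empty key, so in practice all
// messages land in partition 0 unless a numeric key is supplied.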
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
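// For reference (illustrative, not part of the patch): the envelopes handed to Producer.send()
// serialize with fastjson roughly as follows; device code, reading and timestamps are made up,
// and key casing in the output follows fastjson's default JavaBean naming of the getters.
//   String status = "[{\"Key\":\"Level\",\"Value\":\"1.19\"},{\"Key\":\"Power\",\"Value\":\"9.9\"}]";
//   StandardData data = new StandardData("11201900001", DeviceTypeConstant.LIQUID, status, "20191213000000");
//   String dataJson = JSON.toJSONString(data);      // published to the configured dataTopic
//   StandardAlarm alarm = new StandardAlarm("11201900001", DeviceTypeConstant.LIQUID,
//           StandardDataUtils.alarmDataBuilder("11201900001", "1.19", "液位超限"), "20191213000000");
//   String alarmJson = JSON.toJSONString(alarm);    // published to the configured alarmTopic
// Design note: records that were themselves consumed from Kafka carry the kafkaDataFlag marker
// (DeviceTypeConstant.KAFKA_DATA_FLAG); LocatorManager skips publishing when the flag is present
// and MeterManager checks it against "0", so the data topic does not echo its own output.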
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + org.apache.kafka + kafka-clients + ${kafka.version} + + + xml-apis + xml-apis + + + + + + xml-apis + xml-apis + 1.4.01 + + commons-beanutils @@ -209,7 +228,6 @@ 1.4.5 - org.slf4j @@ -262,5 +280,53 @@ spring-jms 3.2.8.RELEASE + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + \ No newline at end of file diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java index bca4fde..b363729 100644 --- a/src/main/java/org/well/mysql/sink/WellSink.java +++ b/src/main/java/org/well/mysql/sink/WellSink.java @@ -54,8 +54,6 @@ super.start(); LOG.info("--------wellMysqlSink start-------"); System.out.println("--------wellMysqlSink start-------"); - - } @Override @@ -125,7 +123,7 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}"; temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}"; - temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // 
temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// 
temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = 
Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final 
String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + 
rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, 
"org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + 
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occurred: {}.", e); + } + + return isSecurity; + } + + /* + * Check whether the given file exists. + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + // Partition by key: parse the key as a number and take it modulo the partition count + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + // If the key cannot be parsed as a number, fall back to partition 0 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ?
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + org.apache.kafka + kafka-clients + ${kafka.version} + + + xml-apis + xml-apis + + + + + + xml-apis + xml-apis + 1.4.01 + + commons-beanutils @@ -209,7 +228,6 @@ 1.4.5 - org.slf4j @@ -262,5 +280,53 @@ spring-jms 3.2.8.RELEASE + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + \ No newline at end of file diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java index bca4fde..b363729 100644 --- a/src/main/java/org/well/mysql/sink/WellSink.java +++ b/src/main/java/org/well/mysql/sink/WellSink.java @@ -54,8 +54,6 @@ super.start(); LOG.info("--------wellMysqlSink start-------"); System.out.println("--------wellMysqlSink start-------"); - - } @Override @@ -125,7 +123,7 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}"; temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}"; - temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // 
temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// 
temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class 
Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public 
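The alarm path used by LiquidManager, MethaneManager and NoiseDigManager can be sketched the same way. alarmDataBuilder formats a Chinese sentence of the form 在<time>,设备<devCode>发生了<alarmContent>,数值为<value> ("at <time>, device <devCode> raised <alarmContent>, value <value>"), and StandardAlarm wraps it for the alarm topic; MSGQUEUE_8287 is the default ALARM_TOPIC from KafkaProperties, whereas the managers resolve the topic via Configure.getProperty(TopicConstant.ALARM_TOPIC, ""). The alarm text and values below are placeholders.

package org.well.well.kafka.demo; // hypothetical demo package, not part of the patch

import com.alibaba.fastjson.JSON;
import org.well.well.kafka.Producer;
import org.well.well.kafka.StandardAlarm;
import org.well.well.kafka.StandardDataUtils;
import org.well.well.kafka.constant.DeviceTypeConstant;

public class StandardAlarmSendDemo {
    public static void main(String[] args) throws Exception {
        // Build the human-readable alarm message the same way the managers do.
        String alarmData = StandardDataUtils.alarmDataBuilder(
                "11201900001", "6.73", "level over threshold");
        // Wrap it in the StandardAlarm envelope and publish it to the alarm topic.
        StandardAlarm alarm = new StandardAlarm(
                "11201900001", DeviceTypeConstant.LIQUID, alarmData, "20191217161000");
        Producer.send(JSON.toJSONString(alarm), "MSGQUEUE_8287");
    }
}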
String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 
0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = 
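On the receiving side, KafkaUtils.consumerInitProperties() mirrors the producer settings (SASL_PLAINTEXT, Kerberos service name "kafka", string deserializers), so a reader for the data topic can be sketched as follows. This is an assumed standalone consumer, not part of the patch, and it needs the same security files as the producer when the cluster runs in secure mode.

package org.well.well.kafka.demo; // hypothetical demo package, not part of the patch

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.well.well.kafka.util.KafkaProperties;
import org.well.well.kafka.util.KafkaUtils;

import java.time.Duration;
import java.util.Collections;

public class StandardDataConsumerDemo {
    public static void main(String[] args) {
        KafkaConsumer<String, String> consumer =
                new KafkaConsumer<>(KafkaUtils.consumerInitProperties());
        consumer.subscribe(Collections.singletonList(KafkaProperties.DATA_TOPIC));
        try {
            // Poll forever; stop with Ctrl+C.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
                for (ConsumerRecord<String, String> record : records) {
                    // record.value() is the fastjson-serialized StandardData envelope sent by Producer.
                    System.out.println(record.partition() + " " + record.offset() + " " + record.value());
                }
            }
        } finally {
            consumer.close();
        }
    }
}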
producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// 
System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + 
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null,
// temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// 
temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = 
Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final 
String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + 
rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, 
"org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + 
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0)
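The hunks above wire the same publish path into LiquidManager, LocatorManager, MeterManager, MethaneManager, NoiseDigManager, TempHumiManager and WasteGasManager. The minimal sketch below gathers that path in one place for review; the class name StandardDataPublishSketch, the sample field names and the generic types are illustrative assumptions, not code from this patch.

package org.well.well.kafka;

import com.alibaba.fastjson.JSON;
import net.sf.json.JSONObject;
import org.well.well.core.util.StringUtils;
import org.well.well.kafka.constant.DeviceTypeConstant;
import org.well.well.kafka.constant.TopicConstant;
import org.well.well.util.Configure;

import java.io.IOException;
import java.util.List;
import java.util.Map;

// Illustrative summary class (not part of the patch): the publish path each manager repeats.
public class StandardDataPublishSketch {

    // 'sample' is one element of the "datas" array, e.g. {"level":1.18,"uptime":"20191213000000"}
    public static void publishLiquidSample(JSONObject sample, String devCode, String cell) throws IOException {
        // the battery reading is appended to every standardized status list under the key "Power"
        List<Map<String, String>> appendList = StandardDataUtils.appendListBuilder(cell, "Power");
        // map the repository field names to the standardized keys expected downstream
        String[] realParam = new String[]{"level"};
        String[] standardParam = new String[]{"Level"};
        String standardStatus = StandardDataUtils.standardStatusFomate(sample, realParam, standardParam, appendList);
        StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus,
                sample.get("uptime").toString());
        // the topic comes from configuration; nothing is published when it is not configured
        String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, "");
        if (!StringUtils.isEmpty(dataTopic)) {
            Producer.send(JSON.toJSONString(standardData), dataTopic);
        }
    }
}

Each manager inlines this sequence per reading, substituting its own DeviceTypeConstant and its own realParam/standardParam mapping, and uses the same pattern with StandardAlarm and TopicConstant.ALARM_TOPIC when a threshold alarm is raised.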
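LocatorManager, TempHumiManager and WasteGasManager additionally skip publishing when the record itself arrived from Kafka: such records carry the kafkaDataFlag key, and re-publishing them would echo the same data back onto the topic. A minimal sketch of that guard, assuming the same net.sf.json.JSONObject type the managers use (the class name KafkaEchoGuardSketch is hypothetical):

package org.well.well.kafka;

import net.sf.json.JSONObject;
import org.well.well.kafka.constant.DeviceTypeConstant;

// Illustrative helper (not part of the patch): the guard the managers inline before publishing.
public class KafkaEchoGuardSketch {

    // true when the message should be forwarded to Kafka, i.e. it did not originate there
    public static boolean shouldForward(JSONObject jsonObject) {
        boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG);
        return !kafkaDataFlag;
    }
}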
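Producer.send, added earlier in this patch, creates and closes a KafkaProducer on every call and, when the kafkaSecurityMode file says yes, first runs the Kerberos/JAAS preparation from LoginUtil. The sketch below isolates that bootstrap sequence; the class name SecureProducerBootstrapSketch and the example topic and payload are assumptions, and the hard-coded Windows config paths inside KafkaProperties and LoginUtil still apply.

package org.well.well.kafka;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.well.well.kafka.util.KafkaProperties;
import org.well.well.kafka.util.KafkaUtils;
import org.well.well.kafka.util.LoginUtil;

import java.io.IOException;
import java.util.Properties;
import java.util.concurrent.ExecutionException;

// Illustrative bootstrap (not part of the patch): what Producer.send does on every call.
public class SecureProducerBootstrapSketch {

    public static void main(String[] args) throws IOException {
        // run the Kerberos/JAAS preparation only when kafkaSecurityMode is set to "yes"
        if (LoginUtil.isSecurityModel()) {
            LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE);
        }
        // SASL_PLAINTEXT settings, serializers and SimplePartitioner come from producer.properties defaults
        Properties props = KafkaUtils.producerInitProperties();
        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(props);
        try {
            // example topic and payload; real callers pass the configured data/alarm topics
            producer.send(new ProducerRecord<String, String>("MSGQUEUE_8287", "", "{\"DevID\":\"example\"}")).get();
        } catch (InterruptedException | ExecutionException e) {
            e.printStackTrace();
        } finally {
            producer.close();
        }
    }
}

Because the producer is rebuilt and closed per message, callers that publish frequently may prefer to reuse a single instance; that is a design observation about the patch rather than part of it.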
temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// 
temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = 
Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final 
String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + 
rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, 
"org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + 
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + org.apache.kafka + kafka-clients + ${kafka.version} + + + xml-apis + xml-apis + + + + + + xml-apis + xml-apis + 1.4.01 + + commons-beanutils @@ -209,7 +228,6 @@ 1.4.5 - org.slf4j @@ -262,5 +280,53 @@ spring-jms 3.2.8.RELEASE + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + \ No newline at end of file diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java index bca4fde..b363729 100644 --- a/src/main/java/org/well/mysql/sink/WellSink.java +++ b/src/main/java/org/well/mysql/sink/WellSink.java @@ -54,8 +54,6 @@ super.start(); LOG.info("--------wellMysqlSink start-------"); System.out.println("--------wellMysqlSink start-------"); - - } @Override @@ -125,7 +123,7 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}"; temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}"; - temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// 
temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// 
temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } 
+ Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import 
org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + 
props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
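consumerInitProperties and producerInitProperties only build Properties objects; a client still has to be created from them. A purely illustrative consumer sketch (the code shown in this patch only wires up the producer side), assuming the cluster is reachable and, in security mode, that LoginUtil.securityPrepare has already run:

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.well.well.kafka.util.KafkaProperties;
import org.well.well.kafka.util.KafkaUtils;
import org.well.well.kafka.util.LoginUtil;

import java.io.IOException;
import java.time.Duration;
import java.util.Collections;

public class DataTopicConsumerSketch {
    public static void main(String[] args) throws IOException {
        // Kerberos preparation, mirroring what Producer.send does before publishing.
        if (LoginUtil.isSecurityModel()) {
            LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE);
        }
        KafkaConsumer<String, String> consumer =
                new KafkaConsumer<String, String>(KafkaUtils.consumerInitProperties());
        consumer.subscribe(Collections.singletonList(KafkaProperties.DATA_TOPIC));
        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value()); // StandardData JSON pushed by the managers
                }
            }
        } finally {
            consumer.close();
        }
    }
}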
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + 
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
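A quick worked example of the partitioning rule in SimplePartitioner above (the partition count is illustrative):

public class SimplePartitionerSketch {
    public static void main(String[] args) {
        int numPartitions = 8; // e.g. the target topic has 8 partitions

        // Numeric record keys are spread by modulo: "17" -> partition 1.
        System.out.println(Integer.parseInt("17") % numPartitions);

        // Non-numeric keys hit the NumberFormatException branch and land on
        // partition 0 - which includes the empty-string key that Producer.send
        // (added later in this patch) uses for every record.
    }
}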
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/src/main/java/org/well/well/resp/WaterQualityResponse.java b/src/main/java/org/well/well/resp/WaterQualityResponse.java index 47d3674..87b18ea 100644 --- a/src/main/java/org/well/well/resp/WaterQualityResponse.java +++ b/src/main/java/org/well/well/resp/WaterQualityResponse.java @@ -9,10 +9,15 @@ import org.well.well.base.AbstractResponse; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; +import org.well.well.kafka.Producer; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; import org.well.well.manager.DeviceConfigManager; import org.well.well.manager.ImeiManager; import org.well.well.manager.LiquidManager; import org.well.well.manager.MeterManager; +import org.well.well.util.Configure; import java.util.HashMap; import java.util.Map; @@ -29,24 +34,38 @@ try { ClassPathXmlApplicationContext ac = this.getAc(); MeterManager meterManager = ac.getBean(MeterManager.class); + //直接推送 JSONObject json = JSONObject.fromObject(content); String devCode = json.get("DevID").toString(); + String dataTimeStr = json.get("LogTime").toString(); Map meterMap = new HashMap(); Map meterMapBefore = new HashMap(); - meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]","")); + meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]", "")); meterMap.put("CN", "2011"); + meterMap.put("LogTime", dataTimeStr); meterMap.put("devCode", devCode); -// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); +// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); JSONArray jsonArray = JSONArray.fromObject(json.get("Status")); for (int i = 0; i < jsonArray.size(); i++) { String name = ((JSONObject) jsonArray.get(i)).get("Key").toString(); String value = ((JSONObject) jsonArray.get(i)).get("Value").toString(); - meterMapBefore.put(name,value); + meterMapBefore.put(name, value); } + //包含ProviderData,且值为1,说明来自于外协厂家,直接进行大数据平台的转发 + if (json.containsKey("ProviderData")&&json.get("ProviderData").toString().equals("1")) { + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(content, dataTopic); + } + } + //水质的数据分别从大数据平台和来源第三方外协 + //由于大数据平台直接转发,没有kafkaDataFlag设置,在第三方转发的时候添加的false, + // 所以这里的逻辑和格式 和其他设备不一样 + meterMap.put(DeviceTypeConstant.KAFKA_DATA_FLAG, json.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG) ? 
"0" : "1"); meterMap.put("temp", meterMapBefore.get("Temp")); meterMap.put("PH", meterMapBefore.get("PH")); - meterMap.put("conductivity", meterMapBefore.get("Cond")); + meterMap.put("conductivity", meterMapBefore.get("Cond")); meterMap.put("DO", meterMapBefore.get("DO")); meterMap.put("turbidity", meterMapBefore.get("Turb")); meterMap.put("COD", meterMapBefore.get("COD")); @@ -54,7 +73,7 @@ meterMap.put("TP", meterMapBefore.get("TP")); meterMap.put("TN", meterMapBefore.get("TN")); meterManager.processMeterData(meterMap); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); } } diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + org.apache.kafka + kafka-clients + ${kafka.version} + + + xml-apis + xml-apis + + + + + + xml-apis + xml-apis + 1.4.01 + + commons-beanutils @@ -209,7 +228,6 @@ 1.4.5 - org.slf4j @@ -262,5 +280,53 @@ spring-jms 3.2.8.RELEASE + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + \ No newline at end of file diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java index bca4fde..b363729 100644 --- a/src/main/java/org/well/mysql/sink/WellSink.java +++ b/src/main/java/org/well/mysql/sink/WellSink.java @@ -54,8 +54,6 @@ super.start(); LOG.info("--------wellMysqlSink start-------"); System.out.println("--------wellMysqlSink start-------"); - - } @Override @@ -125,7 +123,7 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}"; temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}"; - temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 
@@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// 
temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class 
Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public 
String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 
0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = 
producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// 
System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + 
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/src/main/java/org/well/well/resp/WaterQualityResponse.java b/src/main/java/org/well/well/resp/WaterQualityResponse.java index 47d3674..87b18ea 100644 --- a/src/main/java/org/well/well/resp/WaterQualityResponse.java +++ b/src/main/java/org/well/well/resp/WaterQualityResponse.java @@ -9,10 +9,15 @@ import org.well.well.base.AbstractResponse; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; +import org.well.well.kafka.Producer; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; import org.well.well.manager.DeviceConfigManager; import org.well.well.manager.ImeiManager; import org.well.well.manager.LiquidManager; import org.well.well.manager.MeterManager; +import org.well.well.util.Configure; import java.util.HashMap; import java.util.Map; @@ -29,24 +34,38 @@ try { ClassPathXmlApplicationContext ac = this.getAc(); MeterManager meterManager = ac.getBean(MeterManager.class); + //直接推送 JSONObject json = JSONObject.fromObject(content); String devCode = json.get("DevID").toString(); + String dataTimeStr = json.get("LogTime").toString(); Map meterMap = new HashMap(); Map meterMapBefore = new HashMap(); - meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]","")); + meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]", "")); meterMap.put("CN", "2011"); + meterMap.put("LogTime", dataTimeStr); meterMap.put("devCode", devCode); -// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); +// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); JSONArray jsonArray = JSONArray.fromObject(json.get("Status")); for (int i = 0; i < jsonArray.size(); i++) { String name = ((JSONObject) jsonArray.get(i)).get("Key").toString(); String value = ((JSONObject) jsonArray.get(i)).get("Value").toString(); - meterMapBefore.put(name,value); + meterMapBefore.put(name, value); } + //包含ProviderData,且值为1,说明来自于外协厂家,直接进行大数据平台的转发 + if (json.containsKey("ProviderData")&&json.get("ProviderData").toString().equals("1")) { + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(content, dataTopic); + } + } + //水质的数据分别从大数据平台和来源第三方外协 + //由于大数据平台直接转发,没有kafkaDataFlag设置,在第三方转发的时候添加的false, + // 所以这里的逻辑和格式 和其他设备不一样 + meterMap.put(DeviceTypeConstant.KAFKA_DATA_FLAG, json.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG) ? 
"0" : "1"); meterMap.put("temp", meterMapBefore.get("Temp")); meterMap.put("PH", meterMapBefore.get("PH")); - meterMap.put("conductivity", meterMapBefore.get("Cond")); + meterMap.put("conductivity", meterMapBefore.get("Cond")); meterMap.put("DO", meterMapBefore.get("DO")); meterMap.put("turbidity", meterMapBefore.get("Turb")); meterMap.put("COD", meterMapBefore.get("COD")); @@ -54,7 +73,7 @@ meterMap.put("TP", meterMapBefore.get("TP")); meterMap.put("TN", meterMapBefore.get("TN")); meterManager.processMeterData(meterMap); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); } } diff --git a/src/main/java/org/well/well/util/PushList.java b/src/main/java/org/well/well/util/PushList.java index 74568fe..ba5e1af 100644 --- a/src/main/java/org/well/well/util/PushList.java +++ b/src/main/java/org/well/well/util/PushList.java @@ -1,85 +1,85 @@ -package org.well.well.util; - -import java.util.ArrayList; -import java.util.List; - -import com.gexin.rp.sdk.base.IPushResult; -import com.gexin.rp.sdk.base.impl.ListMessage; -import com.gexin.rp.sdk.base.impl.Target; -import com.gexin.rp.sdk.http.IGtPush; -import com.gexin.rp.sdk.template.NotificationTemplate; -import com.gexin.rp.sdk.template.style.Style0; - -/** - * Created by test203 on 2019/6/11. - */ - - -public class PushList { - //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; - - private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); - private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); - private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); - - //别名推送方式 - // static String Alias1 = ""; - // static String Alias2 = ""; - static String host = "http://sdk.open.api.igexin.com/apiex.htm"; - - public static void pushToUser(List cids, String title, String content){ - // 配置返回每个用户返回用户状态,可选 - System.setProperty("gexin_pushList_needDetails", "true"); - // 配置返回每个别名及其对应cid的用户状态,可选 - // System.setProperty("gexin_pushList_needAliasDetails", "true"); - IGtPush push = new IGtPush(host, appKey, masterSecret); - // 通知透传模板 - NotificationTemplate template = notificationTemplateDemo(title,content); - ListMessage message = new ListMessage(); - message.setData(template); - // 设置消息离线,并设置离线时间 - message.setOffline(true); - // 离线有效时间,单位为毫秒,可选 - message.setOfflineExpireTime(24 * 1000 * 3600); - // 配置推送目标 - List targets = new ArrayList(); - for (String cid : cids) { - Target target = new Target(); - target.setAppId(appId); - target.setClientId(cid); - targets.add(target); - } - - // taskId用于在推送时去查找对应的message - String taskId = push.getContentId(message); - IPushResult ret = push.pushMessageToList(taskId, targets); - System.out.println(ret.getResponse().toString()); - } - - public static NotificationTemplate notificationTemplateDemo(String title,String content) { - NotificationTemplate template = new NotificationTemplate(); - // 设置APPID与APPKEY - template.setAppId(appId); - template.setAppkey(appKey); - - Style0 style = new Style0(); - // 设置通知栏标题与内容 - style.setTitle(title); - style.setText(content); - // 配置通知栏图标 - style.setLogo("icon.png"); - // 配置通知栏网络图标 - style.setLogoUrl(""); - // 设置通知是否响铃,震动,或者可清除 - style.setRing(true); - style.setVibrate(true); - style.setClearable(true); - template.setStyle(style); - - // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 - template.setTransmissionType(2); - template.setTransmissionContent("请输入您要透传的内容"); - return template; - } -} - +//package org.well.well.util; +// +//import java.util.ArrayList; 
+//import java.util.List; +// +//import com.gexin.rp.sdk.base.IPushResult; +//import com.gexin.rp.sdk.base.impl.ListMessage; +//import com.gexin.rp.sdk.base.impl.Target; +//import com.gexin.rp.sdk.http.IGtPush; +//import com.gexin.rp.sdk.template.NotificationTemplate; +//import com.gexin.rp.sdk.template.style.Style0; +// +///** +// * Created by test203 on 2019/6/11. +// */ +// +// +//public class PushList { +// //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; +// +// private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); +// private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); +// private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); +// +// //别名推送方式 +// // static String Alias1 = ""; +// // static String Alias2 = ""; +// static String host = "http://sdk.open.api.igexin.com/apiex.htm"; +// +// public static void pushToUser(List cids, String title, String content){ +// // 配置返回每个用户返回用户状态,可选 +// System.setProperty("gexin_pushList_needDetails", "true"); +// // 配置返回每个别名及其对应cid的用户状态,可选 +// // System.setProperty("gexin_pushList_needAliasDetails", "true"); +// IGtPush push = new IGtPush(host, appKey, masterSecret); +// // 通知透传模板 +// NotificationTemplate template = notificationTemplateDemo(title,content); +// ListMessage message = new ListMessage(); +// message.setData(template); +// // 设置消息离线,并设置离线时间 +// message.setOffline(true); +// // 离线有效时间,单位为毫秒,可选 +// message.setOfflineExpireTime(24 * 1000 * 3600); +// // 配置推送目标 +// List targets = new ArrayList(); +// for (String cid : cids) { +// Target target = new Target(); +// target.setAppId(appId); +// target.setClientId(cid); +// targets.add(target); +// } +// +// // taskId用于在推送时去查找对应的message +// String taskId = push.getContentId(message); +// IPushResult ret = push.pushMessageToList(taskId, targets); +// System.out.println(ret.getResponse().toString()); +// } +// +// public static NotificationTemplate notificationTemplateDemo(String title,String content) { +// NotificationTemplate template = new NotificationTemplate(); +// // 设置APPID与APPKEY +// template.setAppId(appId); +// template.setAppkey(appKey); +// +// Style0 style = new Style0(); +// // 设置通知栏标题与内容 +// style.setTitle(title); +// style.setText(content); +// // 配置通知栏图标 +// style.setLogo("icon.png"); +// // 配置通知栏网络图标 +// style.setLogoUrl(""); +// // 设置通知是否响铃,震动,或者可清除 +// style.setRing(true); +// style.setVibrate(true); +// style.setClearable(true); +// template.setStyle(style); +// +// // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 +// template.setTransmissionType(2); +// template.setTransmissionContent("请输入您要透传的内容"); +// return template; +// } +//} +// diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + org.apache.kafka + kafka-clients + ${kafka.version} + + + xml-apis + xml-apis + + + + + + xml-apis + xml-apis + 1.4.01 + + commons-beanutils @@ -209,7 +228,6 @@ 1.4.5 - org.slf4j @@ -262,5 +280,53 @@ spring-jms 3.2.8.RELEASE + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + + central + Maven Central 
+ https://repo1.maven.org/maven2/ + + + + + \ No newline at end of file diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java index bca4fde..b363729 100644 --- a/src/main/java/org/well/mysql/sink/WellSink.java +++ b/src/main/java/org/well/mysql/sink/WellSink.java @@ -54,8 +54,6 @@ super.start(); LOG.info("--------wellMysqlSink start-------"); System.out.println("--------wellMysqlSink start-------"); - - } @Override @@ -125,7 +123,7 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}"; temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}"; - temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// 
temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// 
temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ 
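// Usage sketch for the Producer class above (example values only; StandardData,
// DeviceTypeConstant, TopicConstant, Configure and StringUtils are all added
// elsewhere in this patch).
private static void publishStandardDataExample() throws IOException {
    StandardData data = new StandardData("412019010212", DeviceTypeConstant.WELL,
            "[{\"Key\":\"Status\",\"Value\":\"Normal\"}]", "20200407085400");
    String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, "");
    if (!StringUtils.isEmpty(dataTopic)) {
        Producer.send(JSON.toJSONString(data), dataTopic);
    }
}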
b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return 
JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String 
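// Worked example for StandardDataUtils above (generic types assumed to be
// List<Map<String, String>>, matching how the managers call these helpers).
// For a liquid sample {"level":"1.19"} plus an appended Power entry, the
// returned Status string is roughly
//   [{"Key":"Level","Value":"1.19"},{"Key":"Power","Value":"98"}]
// (member order inside each object may vary, since the entries are HashMaps).
private static String statusExample() {
    JSONObject sample = JSONObject.fromObject("{\"level\":\"1.19\",\"uptime\":\"20191213000000\"}");
    List<Map<String, String>> append = StandardDataUtils.appendListBuilder("98", "Power");
    return StandardDataUtils.standardStatusFomate(
            sample, new String[]{"level"}, new String[]{"Level"}, append);
}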
USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String 
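// Usage sketch for KafkaProperties above: getValues consults server.properties,
// then producer.properties, consumer.properties and client.properties, and only
// then falls back to the supplied default.
String bootstrapServers = KafkaProperties.getInstance()
        .getValues("bootstrap.servers", "localhost:21007");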
SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + 
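// The patch only exercises the producer path; a consumer built from
// consumerInitProperties() above would look roughly like this (plain
// kafka-clients consumer API; in security mode the same LoginUtil preparation
// as in Producer would be required first).
private static void pollOnce() {
    KafkaConsumer<String, String> consumer =
            new KafkaConsumer<String, String>(KafkaUtils.consumerInitProperties());
    consumer.subscribe(Collections.singletonList(KafkaProperties.DATA_TOPIC));
    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
    for (ConsumerRecord<String, String> record : records) {
        System.out.println(record.value()); // standardized JSON payloads
    }
    consumer.close();
}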
System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. + */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, 
krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties 
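// For reference: securityPrepare above is what Producer.send invokes in
// security mode, i.e.
//   LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE);
// On an Oracle/OpenJDK runtime the generated .jaas.conf then contains one
// stanza per module (StormClient, KafkaClient, Client); the KafkaClient stanza
// looks roughly like:
//   KafkaClient {
//   com.sun.security.auth.module.Krb5LoginModule required
//   useKeyTab=true
//   keyTab="<keytab path>"
//   principal="<machine account>@HADOOP.COM"
//   useTicketCache=false
//   storeKey=true
//   debug=true;
//   };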
securityProps = new Properties(); + // file does not exist. + if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, 
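// Editorial note on SimplePartitioner above: numeric record keys are spread by
// modulo over the topic's partition count; any non-numeric key falls back to
// partition 0. The rule in isolation:
private static int choosePartition(String key, int numPartitions) {
    try {
        return Integer.parseInt(key) % numPartitions;
    } catch (NumberFormatException e) {
        return 0; // unparseable keys (including the empty key) land on partition 0
    }
}
// Since Producer.send publishes records with an empty-string key, every message
// sent through this partitioner ends up on partition 0.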
cell, upTime, devCode, busWell == null ? "" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] 
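// Sketch of the alarm-side forwarding added above (example values only; all
// referenced types come from this patch): alarmDataBuilder renders a readable
// message, which is wrapped in a StandardAlarm and sent to the alarm topic.
private static void publishAlarmExample() throws IOException {
    String msg = StandardDataUtils.alarmDataBuilder("11201900001", "2.35",
            LiquidAlarmEnum.OVER_THRESH.getName());
    StandardAlarm alarm = new StandardAlarm("11201900001", DeviceTypeConstant.LIQUID,
            msg, "20191213000000");
    String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, "");
    if (!StringUtils.isEmpty(alarmTopic)) {
        Producer.send(JSON.toJSONString(alarm), alarmTopic);
    }
}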
standardParam = new String[]{"Lon", "lat"}; if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
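The rank comparison above treats each rule's rank value as a percentage of the configured threshold. A small worked example with made-up numbers, using the same arithmetic:

    import java.math.BigDecimal;

    public class NoiseRankSketch {
        public static void main(String[] args) {
            String thresh = "60";      // configured alarm threshold (made-up value)
            String rankValue = "80";   // rank percentage from the rule table (made-up value)
            String val = "50.00";      // formatted noise reading

            // A reading trips the rank once it reaches rankValue percent of the threshold,
            // here 80% of 60 = 48.0.
            float trigger = new BigDecimal(rankValue).multiply(new BigDecimal(thresh)).floatValue() / 100;
            boolean overRank = Float.valueOf(val) >= trigger;
            System.out.println(trigger + " -> " + overRank); // 48.0 -> true
        }
    }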
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
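The kafkaDataFlag check used here (and in the WasteGas and Well managers) only tests for the presence of the key: data replayed from the big-data platform carries the marker and is stored locally but not published again, which prevents a forwarding loop. A minimal sketch of that decision, not part of the patch:

    import net.sf.json.JSONObject;
    import org.well.well.kafka.constant.DeviceTypeConstant;

    public class KafkaDataFlagSketch {
        // Only the presence of the kafkaDataFlag key matters, not its value.
        public static boolean shouldForward(JSONObject deviceJson) {
            return !deviceJson.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG);
        }

        public static void main(String[] args) {
            JSONObject fromPlatform = new JSONObject();
            fromPlatform.put(DeviceTypeConstant.KAFKA_DATA_FLAG, "true");
            System.out.println(shouldForward(fromPlatform));      // false: do not re-publish
            System.out.println(shouldForward(new JSONObject()));  // true: standardize and publish
        }
    }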
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
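Throughout this patch the publish calls are wrapped in a negated emptiness check on the configured topic, so nothing is sent when no topic property is set. A minimal helper capturing that guard, for reference only (not part of the patch):

    import java.io.IOException;

    import com.alibaba.fastjson.JSON;
    import org.well.well.core.util.StringUtils;
    import org.well.well.kafka.Producer;
    import org.well.well.util.Configure;

    public class GuardedSendSketch {
        // Publish only when a topic is actually configured, mirroring the
        // !StringUtils.isEmpty(...) guard used around Producer.send in the managers.
        // Example call: sendIfConfigured(standardAlarm, TopicConstant.ALARM_TOPIC);
        public static void sendIfConfigured(Object payload, String topicProperty) throws IOException {
            String topic = Configure.getProperty(topicProperty, "");
            if (!StringUtils.isEmpty(topic)) {
                Producer.send(JSON.toJSONString(payload), topic);
            }
        }
    }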
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ? 
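Well messages have no per-reading key mapping, so the standardized Status shown above is assembled purely from appended Key/Value pairs and standardStatusFomate is called with a null JSON object. A small sketch of that path; raw types are used because the generic parameters were stripped in this dump:

    import java.util.List;

    import org.well.well.AlarmEnumDTO.WellAlarmEnum;
    import org.well.well.kafka.StandardDataUtils;

    public class WellStatusSketch {
        @SuppressWarnings({"rawtypes", "unchecked"})
        public static void main(String[] args) {
            List appendList = StandardDataUtils.appendListBuilder("", "Power");
            appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status"));
            appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle"));

            // realParam is null, so only the appended pairs are serialized, roughly:
            // [{"Key":"Power","Value":""},{"Key":"Status","Value":"..."},{"Key":"Angle","Value":""}]
            System.out.println(StandardDataUtils.standardStatusFomate(null, null, null, appendList));
        }
    }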
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/src/main/java/org/well/well/resp/WaterQualityResponse.java b/src/main/java/org/well/well/resp/WaterQualityResponse.java index 47d3674..87b18ea 100644 --- a/src/main/java/org/well/well/resp/WaterQualityResponse.java +++ b/src/main/java/org/well/well/resp/WaterQualityResponse.java @@ -9,10 +9,15 @@ import org.well.well.base.AbstractResponse; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; +import org.well.well.kafka.Producer; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; import org.well.well.manager.DeviceConfigManager; import org.well.well.manager.ImeiManager; import org.well.well.manager.LiquidManager; import org.well.well.manager.MeterManager; +import org.well.well.util.Configure; import java.util.HashMap; import java.util.Map; @@ -29,24 +34,38 @@ try { ClassPathXmlApplicationContext ac = this.getAc(); MeterManager meterManager = ac.getBean(MeterManager.class); + //直接推送 JSONObject json = JSONObject.fromObject(content); String devCode = json.get("DevID").toString(); + String dataTimeStr = json.get("LogTime").toString(); Map meterMap = new HashMap(); Map meterMapBefore = new HashMap(); - meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]","")); + meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]", "")); meterMap.put("CN", "2011"); + meterMap.put("LogTime", dataTimeStr); meterMap.put("devCode", devCode); -// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); +// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); JSONArray jsonArray = JSONArray.fromObject(json.get("Status")); for (int i = 0; i < jsonArray.size(); i++) { String name = ((JSONObject) jsonArray.get(i)).get("Key").toString(); String value = ((JSONObject) jsonArray.get(i)).get("Value").toString(); - meterMapBefore.put(name,value); + meterMapBefore.put(name, value); } + //包含ProviderData,且值为1,说明来自于外协厂家,直接进行大数据平台的转发 + if (json.containsKey("ProviderData")&&json.get("ProviderData").toString().equals("1")) { + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(content, dataTopic); + } + } + //水质的数据分别从大数据平台和来源第三方外协 + //由于大数据平台直接转发,没有kafkaDataFlag设置,在第三方转发的时候添加的false, + // 所以这里的逻辑和格式 和其他设备不一样 + meterMap.put(DeviceTypeConstant.KAFKA_DATA_FLAG, json.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG) ? 
"0" : "1"); meterMap.put("temp", meterMapBefore.get("Temp")); meterMap.put("PH", meterMapBefore.get("PH")); - meterMap.put("conductivity", meterMapBefore.get("Cond")); + meterMap.put("conductivity", meterMapBefore.get("Cond")); meterMap.put("DO", meterMapBefore.get("DO")); meterMap.put("turbidity", meterMapBefore.get("Turb")); meterMap.put("COD", meterMapBefore.get("COD")); @@ -54,7 +73,7 @@ meterMap.put("TP", meterMapBefore.get("TP")); meterMap.put("TN", meterMapBefore.get("TN")); meterManager.processMeterData(meterMap); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); } } diff --git a/src/main/java/org/well/well/util/PushList.java b/src/main/java/org/well/well/util/PushList.java index 74568fe..ba5e1af 100644 --- a/src/main/java/org/well/well/util/PushList.java +++ b/src/main/java/org/well/well/util/PushList.java @@ -1,85 +1,85 @@ -package org.well.well.util; - -import java.util.ArrayList; -import java.util.List; - -import com.gexin.rp.sdk.base.IPushResult; -import com.gexin.rp.sdk.base.impl.ListMessage; -import com.gexin.rp.sdk.base.impl.Target; -import com.gexin.rp.sdk.http.IGtPush; -import com.gexin.rp.sdk.template.NotificationTemplate; -import com.gexin.rp.sdk.template.style.Style0; - -/** - * Created by test203 on 2019/6/11. - */ - - -public class PushList { - //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; - - private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); - private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); - private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); - - //别名推送方式 - // static String Alias1 = ""; - // static String Alias2 = ""; - static String host = "http://sdk.open.api.igexin.com/apiex.htm"; - - public static void pushToUser(List cids, String title, String content){ - // 配置返回每个用户返回用户状态,可选 - System.setProperty("gexin_pushList_needDetails", "true"); - // 配置返回每个别名及其对应cid的用户状态,可选 - // System.setProperty("gexin_pushList_needAliasDetails", "true"); - IGtPush push = new IGtPush(host, appKey, masterSecret); - // 通知透传模板 - NotificationTemplate template = notificationTemplateDemo(title,content); - ListMessage message = new ListMessage(); - message.setData(template); - // 设置消息离线,并设置离线时间 - message.setOffline(true); - // 离线有效时间,单位为毫秒,可选 - message.setOfflineExpireTime(24 * 1000 * 3600); - // 配置推送目标 - List targets = new ArrayList(); - for (String cid : cids) { - Target target = new Target(); - target.setAppId(appId); - target.setClientId(cid); - targets.add(target); - } - - // taskId用于在推送时去查找对应的message - String taskId = push.getContentId(message); - IPushResult ret = push.pushMessageToList(taskId, targets); - System.out.println(ret.getResponse().toString()); - } - - public static NotificationTemplate notificationTemplateDemo(String title,String content) { - NotificationTemplate template = new NotificationTemplate(); - // 设置APPID与APPKEY - template.setAppId(appId); - template.setAppkey(appKey); - - Style0 style = new Style0(); - // 设置通知栏标题与内容 - style.setTitle(title); - style.setText(content); - // 配置通知栏图标 - style.setLogo("icon.png"); - // 配置通知栏网络图标 - style.setLogoUrl(""); - // 设置通知是否响铃,震动,或者可清除 - style.setRing(true); - style.setVibrate(true); - style.setClearable(true); - template.setStyle(style); - - // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 - template.setTransmissionType(2); - template.setTransmissionContent("请输入您要透传的内容"); - return template; - } -} - +//package org.well.well.util; +// +//import java.util.ArrayList; 
+//import java.util.List; +// +//import com.gexin.rp.sdk.base.IPushResult; +//import com.gexin.rp.sdk.base.impl.ListMessage; +//import com.gexin.rp.sdk.base.impl.Target; +//import com.gexin.rp.sdk.http.IGtPush; +//import com.gexin.rp.sdk.template.NotificationTemplate; +//import com.gexin.rp.sdk.template.style.Style0; +// +///** +// * Created by test203 on 2019/6/11. +// */ +// +// +//public class PushList { +// //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; +// +// private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); +// private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); +// private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); +// +// //别名推送方式 +// // static String Alias1 = ""; +// // static String Alias2 = ""; +// static String host = "http://sdk.open.api.igexin.com/apiex.htm"; +// +// public static void pushToUser(List cids, String title, String content){ +// // 配置返回每个用户返回用户状态,可选 +// System.setProperty("gexin_pushList_needDetails", "true"); +// // 配置返回每个别名及其对应cid的用户状态,可选 +// // System.setProperty("gexin_pushList_needAliasDetails", "true"); +// IGtPush push = new IGtPush(host, appKey, masterSecret); +// // 通知透传模板 +// NotificationTemplate template = notificationTemplateDemo(title,content); +// ListMessage message = new ListMessage(); +// message.setData(template); +// // 设置消息离线,并设置离线时间 +// message.setOffline(true); +// // 离线有效时间,单位为毫秒,可选 +// message.setOfflineExpireTime(24 * 1000 * 3600); +// // 配置推送目标 +// List targets = new ArrayList(); +// for (String cid : cids) { +// Target target = new Target(); +// target.setAppId(appId); +// target.setClientId(cid); +// targets.add(target); +// } +// +// // taskId用于在推送时去查找对应的message +// String taskId = push.getContentId(message); +// IPushResult ret = push.pushMessageToList(taskId, targets); +// System.out.println(ret.getResponse().toString()); +// } +// +// public static NotificationTemplate notificationTemplateDemo(String title,String content) { +// NotificationTemplate template = new NotificationTemplate(); +// // 设置APPID与APPKEY +// template.setAppId(appId); +// template.setAppkey(appKey); +// +// Style0 style = new Style0(); +// // 设置通知栏标题与内容 +// style.setTitle(title); +// style.setText(content); +// // 配置通知栏图标 +// style.setLogo("icon.png"); +// // 配置通知栏网络图标 +// style.setLogoUrl(""); +// // 设置通知是否响铃,震动,或者可清除 +// style.setRing(true); +// style.setVibrate(true); +// style.setClearable(true); +// template.setStyle(style); +// +// // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 +// template.setTransmissionType(2); +// template.setTransmissionContent("请输入您要透传的内容"); +// return template; +// } +//} +// diff --git a/src/main/java/org/well/well/util/ResponseResolver.java b/src/main/java/org/well/well/util/ResponseResolver.java index 6fef231..f73734e 100644 --- a/src/main/java/org/well/well/util/ResponseResolver.java +++ b/src/main/java/org/well/well/util/ResponseResolver.java @@ -20,14 +20,15 @@ static { abstractResponseHashMap.put(DeviceTypeEnum.Liquid.name(), new LiquidResponse()); abstractResponseHashMap.put(DeviceTypeEnum.Well.name(), new WellResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse());//集中器 abstractResponseHashMap.put(DeviceTypeEnum.Methane.name(), new MethaneResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.WasteGas.name(), new WasteGasResponse()); - 
abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse()); -// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse());//温湿度 +// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse());//噪音开挖 + abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse());//井盖定位 + abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse());//水质检测 }
diff --git a/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = 
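Producer.send(...) above creates and closes a new KafkaProducer for every message, which keeps the helper stateless but pays connection and metadata overhead on each call. If that overhead ever matters, the usual alternative is a lazily created, reused producer. A hedged sketch built on the same KafkaUtils and LoginUtil helpers, not part of the patch, and fire-and-forget instead of the synchronous get():

    import java.io.IOException;

    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import org.well.well.kafka.util.KafkaProperties;
    import org.well.well.kafka.util.KafkaUtils;
    import org.well.well.kafka.util.LoginUtil;

    public final class ReusableProducer {
        private static KafkaProducer<String, String> producer;

        // Create the producer once, on first use, after the optional Kerberos login.
        private static synchronized KafkaProducer<String, String> instance() throws IOException {
            if (producer == null) {
                if (LoginUtil.isSecurityModel()) {
                    LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE);
                }
                producer = new KafkaProducer<String, String>(KafkaUtils.producerInitProperties());
            }
            return producer;
        }

        public static void send(String content, String topic) throws IOException {
            // Same empty key as the original helper; the Future is intentionally not awaited here.
            instance().send(new ProducerRecord<String, String>(topic, "", content));
        }
    }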
Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final 
String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + 
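KafkaProperties resolves a key by searching the loaded files in a fixed order (server, then producer, then consumer, then client properties) and falls back to the supplied default when nothing matches. A minimal lookup sketch:

    import org.well.well.kafka.util.KafkaProperties;

    public class KafkaPropertiesSketch {
        public static void main(String[] args) {
            // Values come from the .properties files loaded from the configured directory;
            // the second argument is the fallback used when no file defines the key.
            String brokers = KafkaProperties.getInstance().getValues("bootstrap.servers", "localhost:21007");
            System.out.println(brokers);
        }
    }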
rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, 
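For completeness, the records published by the managers can be read back with the consumer settings defined above. A minimal sketch, assuming the property files and, in secure mode, the Kerberos material referenced by these helpers are in place:

    import java.time.Duration;
    import java.util.Collections;

    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.well.well.kafka.util.KafkaProperties;
    import org.well.well.kafka.util.KafkaUtils;

    public class StandardDataConsumerSketch {
        public static void main(String[] args) {
            // Same property bootstrap as the producer side; KafkaProperties.DATA_TOPIC
            // is the sample topic name declared in this patch.
            KafkaConsumer<String, String> consumer =
                    new KafkaConsumer<String, String>(KafkaUtils.consumerInitProperties());
            try {
                consumer.subscribe(Collections.singletonList(KafkaProperties.DATA_TOPIC));
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value());
                }
            } finally {
                consumer.close();
            }
        }
    }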
"org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + 
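// ---------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): how the consumer-side properties
// built by KafkaUtils.consumerInitProperties() above would typically be used.
// Assumptions: the class sits in org.well.well.kafka.util and the topic name
// "TOPIC_WELL_DATA" is a placeholder; real topic names come from
// Configure/TopicConstant elsewhere in this patch.
// ---------------------------------------------------------------------------
package org.well.well.kafka.util;

import java.time.Duration;
import java.util.Collections;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class DemoConsumerSketch {
    public static void main(String[] args) {
        try (KafkaConsumer<String, String> consumer =
                     new KafkaConsumer<String, String>(KafkaUtils.consumerInitProperties())) {
            consumer.subscribe(Collections.singletonList("TOPIC_WELL_DATA"));
            // Poll a few times; a real consumer would loop until shutdown.
            for (int i = 0; i < 10; i++) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.key() + " -> " + record.value());
                }
            }
        }
    }
}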
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
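// ---------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): the intended bootstrap order for
// the LoginUtil helpers above. Assumptions: the principal "kafkauser" and keytab
// file name "user.keytab" mirror USER_PRINCIPAL / USER_KEYTAB_FILE in KafkaUtils,
// and the files live under the hard-coded filePath used by securityPrepare().
// ---------------------------------------------------------------------------
package org.well.well.kafka.util;

import java.io.IOException;
import java.util.Properties;

public class SecurityBootstrapSketch {
    public static Properties producerPropsWithLogin() throws IOException {
        // In security mode the krb5.conf path, the generated jaas.conf and the
        // ZooKeeper server principal must be registered as system properties
        // before the first Kafka client is constructed.
        if (LoginUtil.isSecurityModel()) {
            LoginUtil.securityPrepare("kafkauser", "user.keytab");
        }
        // security.protocol in the returned properties should match the mode:
        // SASL_PLAINTEXT with Kerberos, PLAINTEXT otherwise.
        return KafkaUtils.producerInitProperties();
    }
}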
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/src/main/java/org/well/well/resp/WaterQualityResponse.java b/src/main/java/org/well/well/resp/WaterQualityResponse.java index 47d3674..87b18ea 100644 --- a/src/main/java/org/well/well/resp/WaterQualityResponse.java +++ b/src/main/java/org/well/well/resp/WaterQualityResponse.java @@ -9,10 +9,15 @@ import org.well.well.base.AbstractResponse; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; +import org.well.well.kafka.Producer; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; import org.well.well.manager.DeviceConfigManager; import org.well.well.manager.ImeiManager; import org.well.well.manager.LiquidManager; import org.well.well.manager.MeterManager; +import org.well.well.util.Configure; import java.util.HashMap; import java.util.Map; @@ -29,24 +34,38 @@ try { ClassPathXmlApplicationContext ac = this.getAc(); MeterManager meterManager = ac.getBean(MeterManager.class); + //直接推送 JSONObject json = JSONObject.fromObject(content); String devCode = json.get("DevID").toString(); + String dataTimeStr = json.get("LogTime").toString(); Map meterMap = new HashMap(); Map meterMapBefore = new HashMap(); - meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]","")); + meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]", "")); meterMap.put("CN", "2011"); + meterMap.put("LogTime", dataTimeStr); meterMap.put("devCode", devCode); -// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); +// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); JSONArray jsonArray = JSONArray.fromObject(json.get("Status")); for (int i = 0; i < jsonArray.size(); i++) { String name = ((JSONObject) jsonArray.get(i)).get("Key").toString(); String value = ((JSONObject) jsonArray.get(i)).get("Value").toString(); - meterMapBefore.put(name,value); + meterMapBefore.put(name, value); } + //包含ProviderData,且值为1,说明来自于外协厂家,直接进行大数据平台的转发 + if (json.containsKey("ProviderData")&&json.get("ProviderData").toString().equals("1")) { + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(content, dataTopic); + } + } + //水质的数据分别从大数据平台和来源第三方外协 + //由于大数据平台直接转发,没有kafkaDataFlag设置,在第三方转发的时候添加的false, + // 所以这里的逻辑和格式 和其他设备不一样 + meterMap.put(DeviceTypeConstant.KAFKA_DATA_FLAG, json.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG) ? 
"0" : "1"); meterMap.put("temp", meterMapBefore.get("Temp")); meterMap.put("PH", meterMapBefore.get("PH")); - meterMap.put("conductivity", meterMapBefore.get("Cond")); + meterMap.put("conductivity", meterMapBefore.get("Cond")); meterMap.put("DO", meterMapBefore.get("DO")); meterMap.put("turbidity", meterMapBefore.get("Turb")); meterMap.put("COD", meterMapBefore.get("COD")); @@ -54,7 +73,7 @@ meterMap.put("TP", meterMapBefore.get("TP")); meterMap.put("TN", meterMapBefore.get("TN")); meterManager.processMeterData(meterMap); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); } } diff --git a/src/main/java/org/well/well/util/PushList.java b/src/main/java/org/well/well/util/PushList.java index 74568fe..ba5e1af 100644 --- a/src/main/java/org/well/well/util/PushList.java +++ b/src/main/java/org/well/well/util/PushList.java @@ -1,85 +1,85 @@ -package org.well.well.util; - -import java.util.ArrayList; -import java.util.List; - -import com.gexin.rp.sdk.base.IPushResult; -import com.gexin.rp.sdk.base.impl.ListMessage; -import com.gexin.rp.sdk.base.impl.Target; -import com.gexin.rp.sdk.http.IGtPush; -import com.gexin.rp.sdk.template.NotificationTemplate; -import com.gexin.rp.sdk.template.style.Style0; - -/** - * Created by test203 on 2019/6/11. - */ - - -public class PushList { - //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; - - private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); - private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); - private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); - - //别名推送方式 - // static String Alias1 = ""; - // static String Alias2 = ""; - static String host = "http://sdk.open.api.igexin.com/apiex.htm"; - - public static void pushToUser(List cids, String title, String content){ - // 配置返回每个用户返回用户状态,可选 - System.setProperty("gexin_pushList_needDetails", "true"); - // 配置返回每个别名及其对应cid的用户状态,可选 - // System.setProperty("gexin_pushList_needAliasDetails", "true"); - IGtPush push = new IGtPush(host, appKey, masterSecret); - // 通知透传模板 - NotificationTemplate template = notificationTemplateDemo(title,content); - ListMessage message = new ListMessage(); - message.setData(template); - // 设置消息离线,并设置离线时间 - message.setOffline(true); - // 离线有效时间,单位为毫秒,可选 - message.setOfflineExpireTime(24 * 1000 * 3600); - // 配置推送目标 - List targets = new ArrayList(); - for (String cid : cids) { - Target target = new Target(); - target.setAppId(appId); - target.setClientId(cid); - targets.add(target); - } - - // taskId用于在推送时去查找对应的message - String taskId = push.getContentId(message); - IPushResult ret = push.pushMessageToList(taskId, targets); - System.out.println(ret.getResponse().toString()); - } - - public static NotificationTemplate notificationTemplateDemo(String title,String content) { - NotificationTemplate template = new NotificationTemplate(); - // 设置APPID与APPKEY - template.setAppId(appId); - template.setAppkey(appKey); - - Style0 style = new Style0(); - // 设置通知栏标题与内容 - style.setTitle(title); - style.setText(content); - // 配置通知栏图标 - style.setLogo("icon.png"); - // 配置通知栏网络图标 - style.setLogoUrl(""); - // 设置通知是否响铃,震动,或者可清除 - style.setRing(true); - style.setVibrate(true); - style.setClearable(true); - template.setStyle(style); - - // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 - template.setTransmissionType(2); - template.setTransmissionContent("请输入您要透传的内容"); - return template; - } -} - +//package org.well.well.util; +// +//import java.util.ArrayList; 
+//import java.util.List; +// +//import com.gexin.rp.sdk.base.IPushResult; +//import com.gexin.rp.sdk.base.impl.ListMessage; +//import com.gexin.rp.sdk.base.impl.Target; +//import com.gexin.rp.sdk.http.IGtPush; +//import com.gexin.rp.sdk.template.NotificationTemplate; +//import com.gexin.rp.sdk.template.style.Style0; +// +///** +// * Created by test203 on 2019/6/11. +// */ +// +// +//public class PushList { +// //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; +// +// private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); +// private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); +// private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); +// +// //别名推送方式 +// // static String Alias1 = ""; +// // static String Alias2 = ""; +// static String host = "http://sdk.open.api.igexin.com/apiex.htm"; +// +// public static void pushToUser(List cids, String title, String content){ +// // 配置返回每个用户返回用户状态,可选 +// System.setProperty("gexin_pushList_needDetails", "true"); +// // 配置返回每个别名及其对应cid的用户状态,可选 +// // System.setProperty("gexin_pushList_needAliasDetails", "true"); +// IGtPush push = new IGtPush(host, appKey, masterSecret); +// // 通知透传模板 +// NotificationTemplate template = notificationTemplateDemo(title,content); +// ListMessage message = new ListMessage(); +// message.setData(template); +// // 设置消息离线,并设置离线时间 +// message.setOffline(true); +// // 离线有效时间,单位为毫秒,可选 +// message.setOfflineExpireTime(24 * 1000 * 3600); +// // 配置推送目标 +// List targets = new ArrayList(); +// for (String cid : cids) { +// Target target = new Target(); +// target.setAppId(appId); +// target.setClientId(cid); +// targets.add(target); +// } +// +// // taskId用于在推送时去查找对应的message +// String taskId = push.getContentId(message); +// IPushResult ret = push.pushMessageToList(taskId, targets); +// System.out.println(ret.getResponse().toString()); +// } +// +// public static NotificationTemplate notificationTemplateDemo(String title,String content) { +// NotificationTemplate template = new NotificationTemplate(); +// // 设置APPID与APPKEY +// template.setAppId(appId); +// template.setAppkey(appKey); +// +// Style0 style = new Style0(); +// // 设置通知栏标题与内容 +// style.setTitle(title); +// style.setText(content); +// // 配置通知栏图标 +// style.setLogo("icon.png"); +// // 配置通知栏网络图标 +// style.setLogoUrl(""); +// // 设置通知是否响铃,震动,或者可清除 +// style.setRing(true); +// style.setVibrate(true); +// style.setClearable(true); +// template.setStyle(style); +// +// // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 +// template.setTransmissionType(2); +// template.setTransmissionContent("请输入您要透传的内容"); +// return template; +// } +//} +// diff --git a/src/main/java/org/well/well/util/ResponseResolver.java b/src/main/java/org/well/well/util/ResponseResolver.java index 6fef231..f73734e 100644 --- a/src/main/java/org/well/well/util/ResponseResolver.java +++ b/src/main/java/org/well/well/util/ResponseResolver.java @@ -20,14 +20,15 @@ static { abstractResponseHashMap.put(DeviceTypeEnum.Liquid.name(), new LiquidResponse()); abstractResponseHashMap.put(DeviceTypeEnum.Well.name(), new WellResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse());//集中器 abstractResponseHashMap.put(DeviceTypeEnum.Methane.name(), new MethaneResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.WasteGas.name(), new WasteGasResponse()); - 
abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse()); -// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse());//温湿度 +// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse());//噪音开挖 + abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse());//井盖定位 + abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse());//水质检测 } diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..4701fab --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,41 @@ +Manifest-Version: 1.0 +Main-Class: org.well.mysql.sink.WellSink +Class-Path: slf4j-log4j12-1.6.1.jar commons-codec-1.9.jar commons-pool2- + 2.4.2.jar javassist-3.18.1-GA.jar jcl-over-slf4j-1.7.7.jar jansi-1.11.j + ar hawtbuf-1.11.jar jackson-databind-2.4.2.jar mina-core-2.0.4.jar juni + t-4.10.jar log4j-1.2.16.jar dom4j-1.6.1.jar hamcrest-core-1.1.jar orika + -core-1.4.5.jar activemq-pool-5.14.5.jar activemq-client-5.14.5.jar spr + ing-beans-4.0.6.RELEASE.jar httpcore-4.2.1.jar jboss-transaction-api_1. + 2_spec-1.0.0.Final.jar commons-io-2.4.jar concurrentlinkedhashmap-lru-1 + .2_jdk5.jar commons-cli-1.2.jar commons-fileupload-1.3.1.jar jsr305-1.3 + .9.jar gson-2.2.2.jar jackson-jaxrs-json-provider-2.4.2.jar jetty-6.1.2 + 6.jar janino-2.7.6.jar commons-beanutils-1.9.2.jar hibernate-core-4.3.6 + .Final.jar guava-10.0.1.jar spring-context-support-4.0.6.RELEASE.jar lo + g4jdbc-remix-0.2.7.jar hibernate-jpa-2.1-api-1.0.0.Final.jar servlet-ap + i-2.5-20110124.jar geronimo-jms_1.1_spec-1.1.1.jar java-sizeof-0.0.4.ja + r aopalliance-1.0.jar jandex-1.1.0.Final.jar jboss-logging-3.1.3.GA.jar + antlr-2.7.7.jar validation-api-1.1.0.Final.jar geronimo-jta_1.0.1B_spe + c-1.0.1.jar jackson-jaxrs-base-2.4.2.jar logback-core-1.1.2.jar hiberna + te-validator-5.1.2.Final.jar logback-classic-1.1.2.jar avro-1.7.3.jar s + pring-tx-4.0.6.RELEASE.jar spring-orm-4.0.6.RELEASE.jar activemq-broker + -5.14.5.jar kafka-clients-2.4.0-hw-ei-312005.jar commons-pool-1.5.4.jar + ezmorph-1.0.6.jar netty-3.4.0.Final.jar avro-ipc-1.7.3.jar flume-ng-co + re-1.4.0.jar activemq-openwire-legacy-5.14.5.jar commons-compiler-2.7.6 + .jar jetty-util-6.1.26.jar xbean-spring-4.2.jar jackson-core-asl-1.9.3. 
+ jar jackson-mapper-asl-1.9.3.jar spring-core-4.0.6.RELEASE.jar geronimo + -j2ee-management_1.1_spec-1.0.1.jar ojdbc6-11.1.0.7.0.jar json-lib-2.4- + jdk15.jar activemq-jms-pool-5.14.5.jar jul-to-slf4j-1.7.7.jar velocity- + 1.7.jar joda-time-2.1.jar commons-httpclient-3.1.jar spring-context-4.0 + .6.RELEASE.jar commons-lang3-3.3.2.jar commons-lang-2.5.jar libthrift-0 + .7.0.jar jackson-core-2.4.2.jar hibernate-commons-annotations-4.0.5.Fin + al.jar commons-logging-1.1.1.jar fastjson-1.1.15.jar mysql-connector-ja + va-5.1.25.jar spring-aop-4.0.6.RELEASE.jar slf4j-api-1.7.7.jar httpclie + nt-4.2.1.jar flume-ng-configuration-1.4.0.jar jboss-logging-annotations + -1.2.0.Beta1.jar snappy-java-1.0.4.1.jar paranamer-2.3.jar flume-ng-sdk + -1.4.0.jar spring-webmvc-4.0.6.RELEASE.jar jackson-annotations-2.4.0.ja + r jackson-module-jaxb-annotations-2.4.2.jar commons-dbcp-1.4.jar spring + -jms-3.2.8.RELEASE.jar spring-expression-4.0.6.RELEASE.jar jstl-1.2.jar + xml-apis-1.0.b2.jar activemq-spring-5.14.5.jar spring-jdbc-4.0.6.RELEA + SE.jar classmate-1.0.0.jar commons-collections-3.2.1.jar spring-web-4.0 + .6.RELEASE.jar + diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + org.apache.kafka + kafka-clients + ${kafka.version} + + + xml-apis + xml-apis + + + + + + xml-apis + xml-apis + 1.4.01 + + commons-beanutils @@ -209,7 +228,6 @@ 1.4.5 - org.slf4j @@ -262,5 +280,53 @@ spring-jms 3.2.8.RELEASE + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + \ No newline at end of file diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java index bca4fde..b363729 100644 --- a/src/main/java/org/well/mysql/sink/WellSink.java +++ b/src/main/java/org/well/mysql/sink/WellSink.java @@ -54,8 +54,6 @@ super.start(); LOG.info("--------wellMysqlSink start-------"); System.out.println("--------wellMysqlSink start-------"); - - } @Override @@ -125,7 +123,7 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}"; temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}"; - temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // 
temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - 
temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); 
resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + 
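// A minimal usage sketch for the Producer and StandardAlarm classes defined above, assuming the
// Kafka client configuration files are in place. Producer.send() does the Kerberos preparation
// itself when the kafkaSecurityMode file enables it, builds a KafkaProducer from
// KafkaUtils.producerInitProperties(), sends synchronously and closes the producer per call, so
// nothing else is needed here. The device code, event name and log time are taken from the
// WellOpenAlarm sample frame above; "ManholeCover" and "MSGQUEUE_8287" are the well device type
// and alarm topic named later in this patch (DeviceTypeConstant / KafkaProperties). The managers
// below build a richer Data string with StandardDataUtils.alarmDataBuilder(); a plain event name
// is used here only to keep the sketch short.
import com.alibaba.fastjson.JSON;
import org.well.well.kafka.Producer;
import org.well.well.kafka.StandardAlarm;

import java.io.IOException;

public class AlarmPublishSketch {
    public static void main(String[] args) throws IOException {
        // StandardAlarm(DevID, DevType, Data, LogTime), mirroring the constructor defined above
        StandardAlarm alarm = new StandardAlarm("412019122104", "ManholeCover",
                "WellOpenAlarm", "20191017154056");
        // serialize with fastjson and publish to the alarm topic
        Producer.send(JSON.toJSONString(alarm), "MSGQUEUE_8287");
    }
}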
+ public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package 
org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + 
* @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + 
props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = 
"com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. + */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + 
builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
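    // Taken together, setJaasFile() and getModuleContext() above emit one login section per
    // Module value (StormClient, KafkaClient, Client). On a non-IBM JDK, with the "kafkauser"
    // principal (securityPrepare() appends "@HADOOP.COM") and a keytab under the configured
    // directory, each section of the generated kafkaClient.jaas.conf looks roughly like the
    // following -- the keytab path is a placeholder for the real location:
    //
    //   KafkaClient {
    //   com.sun.security.auth.module.Krb5LoginModule required
    //   useKeyTab=true
    //   keyTab="D:\\...\\user.keytab"
    //   principal="kafkauser@HADOOP.COM"
    //   useTicketCache=false
    //   storeKey=true
    //   debug=true;
    //   };
    //
    // securityPrepare() also points java.security.krb5.conf at krb5.conf in the same directory
    // and sets zookeeper.server.principal, so the keytab, krb5.conf and the kafkaSecurityMode
    // marker file checked below are all expected to live in the directory hard-coded in filePath.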
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ? 
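The look-up-topic, skip-if-blank, send-JSON sequence above repeats in every manager touched by this patch. A possible extraction is sketched below; the helper class and method name are hypothetical, while Configure, Producer and StringUtils are the classes already used here:
import com.alibaba.fastjson.JSON;
import java.io.IOException;
import org.well.well.core.util.StringUtils;
import org.well.well.kafka.Producer;
import org.well.well.util.Configure;

public final class KafkaForward {

    private KafkaForward() {
    }

    // Serializes the payload and sends it to the topic configured under topicKey;
    // does nothing when no topic is configured.
    public static void forward(Object payload, String topicKey) throws IOException {
        String topic = Configure.getProperty(topicKey, "");
        if (!StringUtils.isEmpty(topic)) {
            Producer.send(JSON.toJSONString(payload), topic);
        }
    }
}
With such a helper, a call site would shrink to KafkaForward.forward(standardData, TopicConstant.DATA_TOPIC).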
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/src/main/java/org/well/well/resp/WaterQualityResponse.java b/src/main/java/org/well/well/resp/WaterQualityResponse.java index 47d3674..87b18ea 100644 --- a/src/main/java/org/well/well/resp/WaterQualityResponse.java +++ b/src/main/java/org/well/well/resp/WaterQualityResponse.java @@ -9,10 +9,15 @@ import org.well.well.base.AbstractResponse; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; +import org.well.well.kafka.Producer; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; import org.well.well.manager.DeviceConfigManager; import org.well.well.manager.ImeiManager; import org.well.well.manager.LiquidManager; import org.well.well.manager.MeterManager; +import org.well.well.util.Configure; import java.util.HashMap; import java.util.Map; @@ -29,24 +34,38 @@ try { ClassPathXmlApplicationContext ac = this.getAc(); MeterManager meterManager = ac.getBean(MeterManager.class); + //直接推送 JSONObject json = JSONObject.fromObject(content); String devCode = json.get("DevID").toString(); + String dataTimeStr = json.get("LogTime").toString(); Map meterMap = new HashMap(); Map meterMapBefore = new HashMap(); - meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]","")); + meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]", "")); meterMap.put("CN", "2011"); + meterMap.put("LogTime", dataTimeStr); meterMap.put("devCode", devCode); -// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); +// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); JSONArray jsonArray = JSONArray.fromObject(json.get("Status")); for (int i = 0; i < jsonArray.size(); i++) { String name = ((JSONObject) jsonArray.get(i)).get("Key").toString(); String value = ((JSONObject) jsonArray.get(i)).get("Value").toString(); - meterMapBefore.put(name,value); + meterMapBefore.put(name, value); } + //包含ProviderData,且值为1,说明来自于外协厂家,直接进行大数据平台的转发 + if (json.containsKey("ProviderData")&&json.get("ProviderData").toString().equals("1")) { + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(content, dataTopic); + } + } + //水质的数据分别从大数据平台和来源第三方外协 + //由于大数据平台直接转发,没有kafkaDataFlag设置,在第三方转发的时候添加的false, + // 所以这里的逻辑和格式 和其他设备不一样 + meterMap.put(DeviceTypeConstant.KAFKA_DATA_FLAG, json.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG) ? 
"0" : "1"); meterMap.put("temp", meterMapBefore.get("Temp")); meterMap.put("PH", meterMapBefore.get("PH")); - meterMap.put("conductivity", meterMapBefore.get("Cond")); + meterMap.put("conductivity", meterMapBefore.get("Cond")); meterMap.put("DO", meterMapBefore.get("DO")); meterMap.put("turbidity", meterMapBefore.get("Turb")); meterMap.put("COD", meterMapBefore.get("COD")); @@ -54,7 +73,7 @@ meterMap.put("TP", meterMapBefore.get("TP")); meterMap.put("TN", meterMapBefore.get("TN")); meterManager.processMeterData(meterMap); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); } } diff --git a/src/main/java/org/well/well/util/PushList.java b/src/main/java/org/well/well/util/PushList.java index 74568fe..ba5e1af 100644 --- a/src/main/java/org/well/well/util/PushList.java +++ b/src/main/java/org/well/well/util/PushList.java @@ -1,85 +1,85 @@ -package org.well.well.util; - -import java.util.ArrayList; -import java.util.List; - -import com.gexin.rp.sdk.base.IPushResult; -import com.gexin.rp.sdk.base.impl.ListMessage; -import com.gexin.rp.sdk.base.impl.Target; -import com.gexin.rp.sdk.http.IGtPush; -import com.gexin.rp.sdk.template.NotificationTemplate; -import com.gexin.rp.sdk.template.style.Style0; - -/** - * Created by test203 on 2019/6/11. - */ - - -public class PushList { - //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; - - private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); - private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); - private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); - - //别名推送方式 - // static String Alias1 = ""; - // static String Alias2 = ""; - static String host = "http://sdk.open.api.igexin.com/apiex.htm"; - - public static void pushToUser(List cids, String title, String content){ - // 配置返回每个用户返回用户状态,可选 - System.setProperty("gexin_pushList_needDetails", "true"); - // 配置返回每个别名及其对应cid的用户状态,可选 - // System.setProperty("gexin_pushList_needAliasDetails", "true"); - IGtPush push = new IGtPush(host, appKey, masterSecret); - // 通知透传模板 - NotificationTemplate template = notificationTemplateDemo(title,content); - ListMessage message = new ListMessage(); - message.setData(template); - // 设置消息离线,并设置离线时间 - message.setOffline(true); - // 离线有效时间,单位为毫秒,可选 - message.setOfflineExpireTime(24 * 1000 * 3600); - // 配置推送目标 - List targets = new ArrayList(); - for (String cid : cids) { - Target target = new Target(); - target.setAppId(appId); - target.setClientId(cid); - targets.add(target); - } - - // taskId用于在推送时去查找对应的message - String taskId = push.getContentId(message); - IPushResult ret = push.pushMessageToList(taskId, targets); - System.out.println(ret.getResponse().toString()); - } - - public static NotificationTemplate notificationTemplateDemo(String title,String content) { - NotificationTemplate template = new NotificationTemplate(); - // 设置APPID与APPKEY - template.setAppId(appId); - template.setAppkey(appKey); - - Style0 style = new Style0(); - // 设置通知栏标题与内容 - style.setTitle(title); - style.setText(content); - // 配置通知栏图标 - style.setLogo("icon.png"); - // 配置通知栏网络图标 - style.setLogoUrl(""); - // 设置通知是否响铃,震动,或者可清除 - style.setRing(true); - style.setVibrate(true); - style.setClearable(true); - template.setStyle(style); - - // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 - template.setTransmissionType(2); - template.setTransmissionContent("请输入您要透传的内容"); - return template; - } -} - +//package org.well.well.util; +// +//import java.util.ArrayList; 
+//import java.util.List; +// +//import com.gexin.rp.sdk.base.IPushResult; +//import com.gexin.rp.sdk.base.impl.ListMessage; +//import com.gexin.rp.sdk.base.impl.Target; +//import com.gexin.rp.sdk.http.IGtPush; +//import com.gexin.rp.sdk.template.NotificationTemplate; +//import com.gexin.rp.sdk.template.style.Style0; +// +///** +// * Created by test203 on 2019/6/11. +// */ +// +// +//public class PushList { +// //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; +// +// private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); +// private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); +// private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); +// +// //别名推送方式 +// // static String Alias1 = ""; +// // static String Alias2 = ""; +// static String host = "http://sdk.open.api.igexin.com/apiex.htm"; +// +// public static void pushToUser(List cids, String title, String content){ +// // 配置返回每个用户返回用户状态,可选 +// System.setProperty("gexin_pushList_needDetails", "true"); +// // 配置返回每个别名及其对应cid的用户状态,可选 +// // System.setProperty("gexin_pushList_needAliasDetails", "true"); +// IGtPush push = new IGtPush(host, appKey, masterSecret); +// // 通知透传模板 +// NotificationTemplate template = notificationTemplateDemo(title,content); +// ListMessage message = new ListMessage(); +// message.setData(template); +// // 设置消息离线,并设置离线时间 +// message.setOffline(true); +// // 离线有效时间,单位为毫秒,可选 +// message.setOfflineExpireTime(24 * 1000 * 3600); +// // 配置推送目标 +// List targets = new ArrayList(); +// for (String cid : cids) { +// Target target = new Target(); +// target.setAppId(appId); +// target.setClientId(cid); +// targets.add(target); +// } +// +// // taskId用于在推送时去查找对应的message +// String taskId = push.getContentId(message); +// IPushResult ret = push.pushMessageToList(taskId, targets); +// System.out.println(ret.getResponse().toString()); +// } +// +// public static NotificationTemplate notificationTemplateDemo(String title,String content) { +// NotificationTemplate template = new NotificationTemplate(); +// // 设置APPID与APPKEY +// template.setAppId(appId); +// template.setAppkey(appKey); +// +// Style0 style = new Style0(); +// // 设置通知栏标题与内容 +// style.setTitle(title); +// style.setText(content); +// // 配置通知栏图标 +// style.setLogo("icon.png"); +// // 配置通知栏网络图标 +// style.setLogoUrl(""); +// // 设置通知是否响铃,震动,或者可清除 +// style.setRing(true); +// style.setVibrate(true); +// style.setClearable(true); +// template.setStyle(style); +// +// // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 +// template.setTransmissionType(2); +// template.setTransmissionContent("请输入您要透传的内容"); +// return template; +// } +//} +// diff --git a/src/main/java/org/well/well/util/ResponseResolver.java b/src/main/java/org/well/well/util/ResponseResolver.java index 6fef231..f73734e 100644 --- a/src/main/java/org/well/well/util/ResponseResolver.java +++ b/src/main/java/org/well/well/util/ResponseResolver.java @@ -20,14 +20,15 @@ static { abstractResponseHashMap.put(DeviceTypeEnum.Liquid.name(), new LiquidResponse()); abstractResponseHashMap.put(DeviceTypeEnum.Well.name(), new WellResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse());//集中器 abstractResponseHashMap.put(DeviceTypeEnum.Methane.name(), new MethaneResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.WasteGas.name(), new WasteGasResponse()); - 
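Stepping back to the water-quality handling in WaterQualityResponse above: frames whose ProviderData field is "1" come from the third-party provider and are mirrored to the data topic as-is, while kafkaDataFlag is set to "0" only when the frame already carries that key, i.e. when it came back from the big-data platform. A compact sketch of that decision (the method names are hypothetical; JSONObject is the net.sf.json type used in that class):
import net.sf.json.JSONObject;

public class WaterQualityRoutingSketch {

    // true when the frame was pushed by the third-party provider and should be mirrored to the data topic
    static boolean shouldMirrorToKafka(JSONObject json) {
        return json.containsKey("ProviderData") && "1".equals(json.get("ProviderData").toString());
    }

    // "0": frame originated from the big-data platform; "1": frame came from the third-party provider
    static String kafkaDataFlag(JSONObject json) {
        return json.containsKey("kafkaDataFlag") ? "0" : "1";
    }
}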
abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse()); -// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse());//温湿度 +// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse());//噪音开挖 + abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse());//井盖定位 + abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse());//水质检测 } diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..4701fab --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,41 @@ +Manifest-Version: 1.0 +Main-Class: org.well.mysql.sink.WellSink +Class-Path: slf4j-log4j12-1.6.1.jar commons-codec-1.9.jar commons-pool2- + 2.4.2.jar javassist-3.18.1-GA.jar jcl-over-slf4j-1.7.7.jar jansi-1.11.j + ar hawtbuf-1.11.jar jackson-databind-2.4.2.jar mina-core-2.0.4.jar juni + t-4.10.jar log4j-1.2.16.jar dom4j-1.6.1.jar hamcrest-core-1.1.jar orika + -core-1.4.5.jar activemq-pool-5.14.5.jar activemq-client-5.14.5.jar spr + ing-beans-4.0.6.RELEASE.jar httpcore-4.2.1.jar jboss-transaction-api_1. + 2_spec-1.0.0.Final.jar commons-io-2.4.jar concurrentlinkedhashmap-lru-1 + .2_jdk5.jar commons-cli-1.2.jar commons-fileupload-1.3.1.jar jsr305-1.3 + .9.jar gson-2.2.2.jar jackson-jaxrs-json-provider-2.4.2.jar jetty-6.1.2 + 6.jar janino-2.7.6.jar commons-beanutils-1.9.2.jar hibernate-core-4.3.6 + .Final.jar guava-10.0.1.jar spring-context-support-4.0.6.RELEASE.jar lo + g4jdbc-remix-0.2.7.jar hibernate-jpa-2.1-api-1.0.0.Final.jar servlet-ap + i-2.5-20110124.jar geronimo-jms_1.1_spec-1.1.1.jar java-sizeof-0.0.4.ja + r aopalliance-1.0.jar jandex-1.1.0.Final.jar jboss-logging-3.1.3.GA.jar + antlr-2.7.7.jar validation-api-1.1.0.Final.jar geronimo-jta_1.0.1B_spe + c-1.0.1.jar jackson-jaxrs-base-2.4.2.jar logback-core-1.1.2.jar hiberna + te-validator-5.1.2.Final.jar logback-classic-1.1.2.jar avro-1.7.3.jar s + pring-tx-4.0.6.RELEASE.jar spring-orm-4.0.6.RELEASE.jar activemq-broker + -5.14.5.jar kafka-clients-2.4.0-hw-ei-312005.jar commons-pool-1.5.4.jar + ezmorph-1.0.6.jar netty-3.4.0.Final.jar avro-ipc-1.7.3.jar flume-ng-co + re-1.4.0.jar activemq-openwire-legacy-5.14.5.jar commons-compiler-2.7.6 + .jar jetty-util-6.1.26.jar xbean-spring-4.2.jar jackson-core-asl-1.9.3. 
+ jar jackson-mapper-asl-1.9.3.jar spring-core-4.0.6.RELEASE.jar geronimo + -j2ee-management_1.1_spec-1.0.1.jar ojdbc6-11.1.0.7.0.jar json-lib-2.4- + jdk15.jar activemq-jms-pool-5.14.5.jar jul-to-slf4j-1.7.7.jar velocity- + 1.7.jar joda-time-2.1.jar commons-httpclient-3.1.jar spring-context-4.0 + .6.RELEASE.jar commons-lang3-3.3.2.jar commons-lang-2.5.jar libthrift-0 + .7.0.jar jackson-core-2.4.2.jar hibernate-commons-annotations-4.0.5.Fin + al.jar commons-logging-1.1.1.jar fastjson-1.1.15.jar mysql-connector-ja + va-5.1.25.jar spring-aop-4.0.6.RELEASE.jar slf4j-api-1.7.7.jar httpclie + nt-4.2.1.jar flume-ng-configuration-1.4.0.jar jboss-logging-annotations + -1.2.0.Beta1.jar snappy-java-1.0.4.1.jar paranamer-2.3.jar flume-ng-sdk + -1.4.0.jar spring-webmvc-4.0.6.RELEASE.jar jackson-annotations-2.4.0.ja + r jackson-module-jaxb-annotations-2.4.2.jar commons-dbcp-1.4.jar spring + -jms-3.2.8.RELEASE.jar spring-expression-4.0.6.RELEASE.jar jstl-1.2.jar + xml-apis-1.0.b2.jar activemq-spring-5.14.5.jar spring-jdbc-4.0.6.RELEA + SE.jar classmate-1.0.0.jar commons-collections-3.2.1.jar spring-web-4.0 + .6.RELEASE.jar + diff --git a/src/main/resources/wellSensor/77042.jaas.conf b/src/main/resources/wellSensor/77042.jaas.conf new file mode 100644 index 0000000..3abd31a --- /dev/null +++ b/src/main/resources/wellSensor/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + org.apache.kafka + kafka-clients + ${kafka.version} + + + xml-apis + xml-apis + + + + + + xml-apis + xml-apis + 1.4.01 + + commons-beanutils @@ -209,7 +228,6 @@ 1.4.5 - org.slf4j @@ -262,5 +280,53 @@ spring-jms 3.2.8.RELEASE + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + \ No newline at end of file diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java index bca4fde..b363729 100644 --- a/src/main/java/org/well/mysql/sink/WellSink.java +++ b/src/main/java/org/well/mysql/sink/WellSink.java @@ -54,8 +54,6 @@ super.start(); LOG.info("--------wellMysqlSink start-------"); System.out.println("--------wellMysqlSink start-------"); - - } @Override @@ -125,7 +123,7 @@ // 
temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}"; temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}"; - temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// 
temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// 
temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, 
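A minimal caller for the Producer class added above; it assumes the data topic is configured in the project's property files, and the device code, status JSON and timestamp are illustrative values only:
import com.alibaba.fastjson.JSON;
import java.io.IOException;
import org.well.well.kafka.Producer;
import org.well.well.kafka.StandardData;
import org.well.well.kafka.constant.DeviceTypeConstant;
import org.well.well.kafka.constant.TopicConstant;
import org.well.well.util.Configure;

public class ProducerUsageSketch {
    public static void main(String[] args) throws IOException {
        String status = "[{\"Key\":\"Power\",\"Value\":\"96\"}]";
        StandardData data = new StandardData("412019010212", DeviceTypeConstant.WELL, status, "20191017154056");
        String topic = Configure.getProperty(TopicConstant.DATA_TOPIC, "");
        if (!topic.isEmpty()) {
            // synchronous send; Producer builds and closes a KafkaProducer on every call
            Producer.send(JSON.toJSONString(data), topic);
        }
    }
}
Since send() constructs and closes a KafkaProducer per message, a single long-lived producer instance would be the usual alternative if message volume grows; the sketch above keeps the per-call behaviour of this patch.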
String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); 
+ return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = 
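The Data field carried by StandardAlarm is the plain sentence built by alarmDataBuilder above. For example (device code, value and alarm text are illustrative; the timestamp is whatever the current time is):
import org.well.well.kafka.StandardDataUtils;

public class AlarmTextSketch {
    public static void main(String[] args) {
        String msg = StandardDataUtils.alarmDataBuilder("11201900001", "6.50", "液位超限");
        // prints something like: 在2020-07-01 12:00:00,设备11201900001发生了液位超限,数值为6.50
        System.out.println(msg);
    }
}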
null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = 
"auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. + */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws 
IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return isSecurity; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
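The partitioning rule implemented by SimplePartitioner above, restated as a standalone sketch; numPartitions is illustrative here, while the real partitioner reads it from cluster metadata:
public class SimplePartitionerSketch {

    static int partitionFor(String key, int numPartitions) {
        try {
            // numeric keys are spread modulo the partition count
            return Integer.parseInt(key) % numPartitions;
        } catch (NumberFormatException e) {
            // non-numeric keys (including the empty string) fall back to partition 0
            return 0;
        }
    }

    public static void main(String[] args) {
        System.out.println(partitionFor("17", 3)); // 2
        System.out.println(partitionFor("", 3));   // 0; Producer.send uses "" as the record key, so its records land on partition 0
    }
}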
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ?
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/src/main/java/org/well/well/resp/WaterQualityResponse.java b/src/main/java/org/well/well/resp/WaterQualityResponse.java index 47d3674..87b18ea 100644 --- a/src/main/java/org/well/well/resp/WaterQualityResponse.java +++ b/src/main/java/org/well/well/resp/WaterQualityResponse.java @@ -9,10 +9,15 @@ import org.well.well.base.AbstractResponse; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; +import org.well.well.kafka.Producer; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; import org.well.well.manager.DeviceConfigManager; import org.well.well.manager.ImeiManager; import org.well.well.manager.LiquidManager; import org.well.well.manager.MeterManager; +import org.well.well.util.Configure; import java.util.HashMap; import java.util.Map; @@ -29,24 +34,38 @@ try { ClassPathXmlApplicationContext ac = this.getAc(); MeterManager meterManager = ac.getBean(MeterManager.class); + //直接推送 JSONObject json = JSONObject.fromObject(content); String devCode = json.get("DevID").toString(); + String dataTimeStr = json.get("LogTime").toString(); Map meterMap = new HashMap(); Map meterMapBefore = new HashMap(); - meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]","")); + meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]", "")); meterMap.put("CN", "2011"); + meterMap.put("LogTime", dataTimeStr); meterMap.put("devCode", devCode); -// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); +// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); JSONArray jsonArray = JSONArray.fromObject(json.get("Status")); for (int i = 0; i < jsonArray.size(); i++) { String name = ((JSONObject) jsonArray.get(i)).get("Key").toString(); String value = ((JSONObject) jsonArray.get(i)).get("Value").toString(); - meterMapBefore.put(name,value); + meterMapBefore.put(name, value); } + //包含ProviderData,且值为1,说明来自于外协厂家,直接进行大数据平台的转发 + if (json.containsKey("ProviderData")&&json.get("ProviderData").toString().equals("1")) { + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(content, dataTopic); + } + } + //水质的数据分别从大数据平台和来源第三方外协 + //由于大数据平台直接转发,没有kafkaDataFlag设置,在第三方转发的时候添加的false, + // 所以这里的逻辑和格式 和其他设备不一样 + meterMap.put(DeviceTypeConstant.KAFKA_DATA_FLAG, json.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG) ? 
"0" : "1"); meterMap.put("temp", meterMapBefore.get("Temp")); meterMap.put("PH", meterMapBefore.get("PH")); - meterMap.put("conductivity", meterMapBefore.get("Cond")); + meterMap.put("conductivity", meterMapBefore.get("Cond")); meterMap.put("DO", meterMapBefore.get("DO")); meterMap.put("turbidity", meterMapBefore.get("Turb")); meterMap.put("COD", meterMapBefore.get("COD")); @@ -54,7 +73,7 @@ meterMap.put("TP", meterMapBefore.get("TP")); meterMap.put("TN", meterMapBefore.get("TN")); meterManager.processMeterData(meterMap); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); } } diff --git a/src/main/java/org/well/well/util/PushList.java b/src/main/java/org/well/well/util/PushList.java index 74568fe..ba5e1af 100644 --- a/src/main/java/org/well/well/util/PushList.java +++ b/src/main/java/org/well/well/util/PushList.java @@ -1,85 +1,85 @@ -package org.well.well.util; - -import java.util.ArrayList; -import java.util.List; - -import com.gexin.rp.sdk.base.IPushResult; -import com.gexin.rp.sdk.base.impl.ListMessage; -import com.gexin.rp.sdk.base.impl.Target; -import com.gexin.rp.sdk.http.IGtPush; -import com.gexin.rp.sdk.template.NotificationTemplate; -import com.gexin.rp.sdk.template.style.Style0; - -/** - * Created by test203 on 2019/6/11. - */ - - -public class PushList { - //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; - - private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); - private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); - private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); - - //别名推送方式 - // static String Alias1 = ""; - // static String Alias2 = ""; - static String host = "http://sdk.open.api.igexin.com/apiex.htm"; - - public static void pushToUser(List cids, String title, String content){ - // 配置返回每个用户返回用户状态,可选 - System.setProperty("gexin_pushList_needDetails", "true"); - // 配置返回每个别名及其对应cid的用户状态,可选 - // System.setProperty("gexin_pushList_needAliasDetails", "true"); - IGtPush push = new IGtPush(host, appKey, masterSecret); - // 通知透传模板 - NotificationTemplate template = notificationTemplateDemo(title,content); - ListMessage message = new ListMessage(); - message.setData(template); - // 设置消息离线,并设置离线时间 - message.setOffline(true); - // 离线有效时间,单位为毫秒,可选 - message.setOfflineExpireTime(24 * 1000 * 3600); - // 配置推送目标 - List targets = new ArrayList(); - for (String cid : cids) { - Target target = new Target(); - target.setAppId(appId); - target.setClientId(cid); - targets.add(target); - } - - // taskId用于在推送时去查找对应的message - String taskId = push.getContentId(message); - IPushResult ret = push.pushMessageToList(taskId, targets); - System.out.println(ret.getResponse().toString()); - } - - public static NotificationTemplate notificationTemplateDemo(String title,String content) { - NotificationTemplate template = new NotificationTemplate(); - // 设置APPID与APPKEY - template.setAppId(appId); - template.setAppkey(appKey); - - Style0 style = new Style0(); - // 设置通知栏标题与内容 - style.setTitle(title); - style.setText(content); - // 配置通知栏图标 - style.setLogo("icon.png"); - // 配置通知栏网络图标 - style.setLogoUrl(""); - // 设置通知是否响铃,震动,或者可清除 - style.setRing(true); - style.setVibrate(true); - style.setClearable(true); - template.setStyle(style); - - // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 - template.setTransmissionType(2); - template.setTransmissionContent("请输入您要透传的内容"); - return template; - } -} - +//package org.well.well.util; +// +//import java.util.ArrayList; 
+//import java.util.List; +// +//import com.gexin.rp.sdk.base.IPushResult; +//import com.gexin.rp.sdk.base.impl.ListMessage; +//import com.gexin.rp.sdk.base.impl.Target; +//import com.gexin.rp.sdk.http.IGtPush; +//import com.gexin.rp.sdk.template.NotificationTemplate; +//import com.gexin.rp.sdk.template.style.Style0; +// +///** +// * Created by test203 on 2019/6/11. +// */ +// +// +//public class PushList { +// //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; +// +// private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); +// private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); +// private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); +// +// //别名推送方式 +// // static String Alias1 = ""; +// // static String Alias2 = ""; +// static String host = "http://sdk.open.api.igexin.com/apiex.htm"; +// +// public static void pushToUser(List cids, String title, String content){ +// // 配置返回每个用户返回用户状态,可选 +// System.setProperty("gexin_pushList_needDetails", "true"); +// // 配置返回每个别名及其对应cid的用户状态,可选 +// // System.setProperty("gexin_pushList_needAliasDetails", "true"); +// IGtPush push = new IGtPush(host, appKey, masterSecret); +// // 通知透传模板 +// NotificationTemplate template = notificationTemplateDemo(title,content); +// ListMessage message = new ListMessage(); +// message.setData(template); +// // 设置消息离线,并设置离线时间 +// message.setOffline(true); +// // 离线有效时间,单位为毫秒,可选 +// message.setOfflineExpireTime(24 * 1000 * 3600); +// // 配置推送目标 +// List targets = new ArrayList(); +// for (String cid : cids) { +// Target target = new Target(); +// target.setAppId(appId); +// target.setClientId(cid); +// targets.add(target); +// } +// +// // taskId用于在推送时去查找对应的message +// String taskId = push.getContentId(message); +// IPushResult ret = push.pushMessageToList(taskId, targets); +// System.out.println(ret.getResponse().toString()); +// } +// +// public static NotificationTemplate notificationTemplateDemo(String title,String content) { +// NotificationTemplate template = new NotificationTemplate(); +// // 设置APPID与APPKEY +// template.setAppId(appId); +// template.setAppkey(appKey); +// +// Style0 style = new Style0(); +// // 设置通知栏标题与内容 +// style.setTitle(title); +// style.setText(content); +// // 配置通知栏图标 +// style.setLogo("icon.png"); +// // 配置通知栏网络图标 +// style.setLogoUrl(""); +// // 设置通知是否响铃,震动,或者可清除 +// style.setRing(true); +// style.setVibrate(true); +// style.setClearable(true); +// template.setStyle(style); +// +// // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 +// template.setTransmissionType(2); +// template.setTransmissionContent("请输入您要透传的内容"); +// return template; +// } +//} +// diff --git a/src/main/java/org/well/well/util/ResponseResolver.java b/src/main/java/org/well/well/util/ResponseResolver.java index 6fef231..f73734e 100644 --- a/src/main/java/org/well/well/util/ResponseResolver.java +++ b/src/main/java/org/well/well/util/ResponseResolver.java @@ -20,14 +20,15 @@ static { abstractResponseHashMap.put(DeviceTypeEnum.Liquid.name(), new LiquidResponse()); abstractResponseHashMap.put(DeviceTypeEnum.Well.name(), new WellResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse());//集中器 abstractResponseHashMap.put(DeviceTypeEnum.Methane.name(), new MethaneResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.WasteGas.name(), new WasteGasResponse()); - 
abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse()); -// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse());//温湿度 +// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse());//噪音开挖 + abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse());//井盖定位 + abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse());//水质检测 } diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..4701fab --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,41 @@ +Manifest-Version: 1.0 +Main-Class: org.well.mysql.sink.WellSink +Class-Path: slf4j-log4j12-1.6.1.jar commons-codec-1.9.jar commons-pool2- + 2.4.2.jar javassist-3.18.1-GA.jar jcl-over-slf4j-1.7.7.jar jansi-1.11.j + ar hawtbuf-1.11.jar jackson-databind-2.4.2.jar mina-core-2.0.4.jar juni + t-4.10.jar log4j-1.2.16.jar dom4j-1.6.1.jar hamcrest-core-1.1.jar orika + -core-1.4.5.jar activemq-pool-5.14.5.jar activemq-client-5.14.5.jar spr + ing-beans-4.0.6.RELEASE.jar httpcore-4.2.1.jar jboss-transaction-api_1. + 2_spec-1.0.0.Final.jar commons-io-2.4.jar concurrentlinkedhashmap-lru-1 + .2_jdk5.jar commons-cli-1.2.jar commons-fileupload-1.3.1.jar jsr305-1.3 + .9.jar gson-2.2.2.jar jackson-jaxrs-json-provider-2.4.2.jar jetty-6.1.2 + 6.jar janino-2.7.6.jar commons-beanutils-1.9.2.jar hibernate-core-4.3.6 + .Final.jar guava-10.0.1.jar spring-context-support-4.0.6.RELEASE.jar lo + g4jdbc-remix-0.2.7.jar hibernate-jpa-2.1-api-1.0.0.Final.jar servlet-ap + i-2.5-20110124.jar geronimo-jms_1.1_spec-1.1.1.jar java-sizeof-0.0.4.ja + r aopalliance-1.0.jar jandex-1.1.0.Final.jar jboss-logging-3.1.3.GA.jar + antlr-2.7.7.jar validation-api-1.1.0.Final.jar geronimo-jta_1.0.1B_spe + c-1.0.1.jar jackson-jaxrs-base-2.4.2.jar logback-core-1.1.2.jar hiberna + te-validator-5.1.2.Final.jar logback-classic-1.1.2.jar avro-1.7.3.jar s + pring-tx-4.0.6.RELEASE.jar spring-orm-4.0.6.RELEASE.jar activemq-broker + -5.14.5.jar kafka-clients-2.4.0-hw-ei-312005.jar commons-pool-1.5.4.jar + ezmorph-1.0.6.jar netty-3.4.0.Final.jar avro-ipc-1.7.3.jar flume-ng-co + re-1.4.0.jar activemq-openwire-legacy-5.14.5.jar commons-compiler-2.7.6 + .jar jetty-util-6.1.26.jar xbean-spring-4.2.jar jackson-core-asl-1.9.3. 
+ jar jackson-mapper-asl-1.9.3.jar spring-core-4.0.6.RELEASE.jar geronimo + -j2ee-management_1.1_spec-1.0.1.jar ojdbc6-11.1.0.7.0.jar json-lib-2.4- + jdk15.jar activemq-jms-pool-5.14.5.jar jul-to-slf4j-1.7.7.jar velocity- + 1.7.jar joda-time-2.1.jar commons-httpclient-3.1.jar spring-context-4.0 + .6.RELEASE.jar commons-lang3-3.3.2.jar commons-lang-2.5.jar libthrift-0 + .7.0.jar jackson-core-2.4.2.jar hibernate-commons-annotations-4.0.5.Fin + al.jar commons-logging-1.1.1.jar fastjson-1.1.15.jar mysql-connector-ja + va-5.1.25.jar spring-aop-4.0.6.RELEASE.jar slf4j-api-1.7.7.jar httpclie + nt-4.2.1.jar flume-ng-configuration-1.4.0.jar jboss-logging-annotations + -1.2.0.Beta1.jar snappy-java-1.0.4.1.jar paranamer-2.3.jar flume-ng-sdk + -1.4.0.jar spring-webmvc-4.0.6.RELEASE.jar jackson-annotations-2.4.0.ja + r jackson-module-jaxb-annotations-2.4.2.jar commons-dbcp-1.4.jar spring + -jms-3.2.8.RELEASE.jar spring-expression-4.0.6.RELEASE.jar jstl-1.2.jar + xml-apis-1.0.b2.jar activemq-spring-5.14.5.jar spring-jdbc-4.0.6.RELEA + SE.jar classmate-1.0.0.jar commons-collections-3.2.1.jar spring-web-4.0 + .6.RELEASE.jar + diff --git a/src/main/resources/wellSensor/77042.jaas.conf b/src/main/resources/wellSensor/77042.jaas.conf new file mode 100644 index 0000000..3abd31a --- /dev/null +++ b/src/main/resources/wellSensor/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/wellSensor/application.properties b/src/main/resources/wellSensor/application.properties index 0c2d25c..a8970d1 100644 --- a/src/main/resources/wellSensor/application.properties +++ b/src/main/resources/wellSensor/application.properties @@ -20,9 +20,9 @@ #db.default.username=sensor #db.default.password=sensor db.default.driverClassName=com.mysql.jdbc.Driver -db.default.url=jdbc:mysql://192.168.0.166:3306/smartwell?useUnicode=true&characterEncoding=UTF-8&useSSL=false +db.default.url=jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?useUnicode=true&characterEncoding=UTF-8&useSSL=false db.default.username=root -db.default.password=root +db.default.password=casic203yz2db ### ============================================================================ # dbReal # ============================================================================ @@ -157,8 +157,8 @@ # ============================================================================ # ACTIVEMQ配置 # ============================================================================ -activemq_url = tcp://192.168.0.203:61616 -activemq_username = +activemq_url = tcp://127.0.0.1:61616 +activemq_username ="" activemq_password = @@ -166,6 +166,13 @@ # 告警、工单推送地址 # ============================================================================ #sendURL =http://111.198.10.15:11302/smartwell/job/updateSinkJob -sendURL =http://localhost:14537/job/updateSinkJob +sendURL 
=http://192.168.0.218:80/smartwell/job/updateSinkJob + + # ============================================================================ + # kafka大数据平台 + + bootstrapServer=192.168.65.14:21005,192.168.65.15:21005,192.168.65.16:21005 + alarmTopic=MSGQUEUE_8287 + dataTopic=TEMPSTORE_8204
diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +//
LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 
0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String 
ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + 
return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + 
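// The patch itself only publishes messages, but consumerInitProperties() above builds a
// matching SASL_PLAINTEXT/Kerberos consumer configuration. A minimal consumer built on it
// could look as follows; the class name and the endless poll loop are illustrative, only
// KafkaUtils and KafkaProperties.DATA_TOPIC come from this patch.
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.well.well.kafka.util.KafkaProperties;
import org.well.well.kafka.util.KafkaUtils;

import java.time.Duration;
import java.util.Collections;

class StandardDataConsumerSketch {
    public static void main(String[] args) {
        KafkaConsumer<String, String> consumer =
                new KafkaConsumer<String, String>(KafkaUtils.consumerInitProperties());
        consumer.subscribe(Collections.singletonList(KafkaProperties.DATA_TOPIC));
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<String, String> record : records) {
                // Each value is the StandardData JSON produced by the manager classes below.
                System.out.println(record.value());
            }
        }
    }
}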
+ // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + 
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/src/main/java/org/well/well/resp/WaterQualityResponse.java b/src/main/java/org/well/well/resp/WaterQualityResponse.java index 47d3674..87b18ea 100644 --- a/src/main/java/org/well/well/resp/WaterQualityResponse.java +++ b/src/main/java/org/well/well/resp/WaterQualityResponse.java @@ -9,10 +9,15 @@ import org.well.well.base.AbstractResponse; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; +import org.well.well.kafka.Producer; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; import org.well.well.manager.DeviceConfigManager; import org.well.well.manager.ImeiManager; import org.well.well.manager.LiquidManager; import org.well.well.manager.MeterManager; +import org.well.well.util.Configure; import java.util.HashMap; import java.util.Map; @@ -29,24 +34,38 @@ try { ClassPathXmlApplicationContext ac = this.getAc(); MeterManager meterManager = ac.getBean(MeterManager.class); + //直接推送 JSONObject json = JSONObject.fromObject(content); String devCode = json.get("DevID").toString(); + String dataTimeStr = json.get("LogTime").toString(); Map meterMap = new HashMap(); Map meterMapBefore = new HashMap(); - meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]","")); + meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]", "")); meterMap.put("CN", "2011"); + meterMap.put("LogTime", dataTimeStr); meterMap.put("devCode", devCode); -// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); +// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); JSONArray jsonArray = JSONArray.fromObject(json.get("Status")); for (int i = 0; i < jsonArray.size(); i++) { String name = ((JSONObject) jsonArray.get(i)).get("Key").toString(); String value = ((JSONObject) jsonArray.get(i)).get("Value").toString(); - meterMapBefore.put(name,value); + meterMapBefore.put(name, value); } + //包含ProviderData,且值为1,说明来自于外协厂家,直接进行大数据平台的转发 + if (json.containsKey("ProviderData")&&json.get("ProviderData").toString().equals("1")) { + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(content, dataTopic); + } + } + //水质的数据分别从大数据平台和来源第三方外协 + //由于大数据平台直接转发,没有kafkaDataFlag设置,在第三方转发的时候添加的false, + // 所以这里的逻辑和格式 和其他设备不一样 + meterMap.put(DeviceTypeConstant.KAFKA_DATA_FLAG, json.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG) ? 
"0" : "1"); meterMap.put("temp", meterMapBefore.get("Temp")); meterMap.put("PH", meterMapBefore.get("PH")); - meterMap.put("conductivity", meterMapBefore.get("Cond")); + meterMap.put("conductivity", meterMapBefore.get("Cond")); meterMap.put("DO", meterMapBefore.get("DO")); meterMap.put("turbidity", meterMapBefore.get("Turb")); meterMap.put("COD", meterMapBefore.get("COD")); @@ -54,7 +73,7 @@ meterMap.put("TP", meterMapBefore.get("TP")); meterMap.put("TN", meterMapBefore.get("TN")); meterManager.processMeterData(meterMap); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); } } diff --git a/src/main/java/org/well/well/util/PushList.java b/src/main/java/org/well/well/util/PushList.java index 74568fe..ba5e1af 100644 --- a/src/main/java/org/well/well/util/PushList.java +++ b/src/main/java/org/well/well/util/PushList.java @@ -1,85 +1,85 @@ -package org.well.well.util; - -import java.util.ArrayList; -import java.util.List; - -import com.gexin.rp.sdk.base.IPushResult; -import com.gexin.rp.sdk.base.impl.ListMessage; -import com.gexin.rp.sdk.base.impl.Target; -import com.gexin.rp.sdk.http.IGtPush; -import com.gexin.rp.sdk.template.NotificationTemplate; -import com.gexin.rp.sdk.template.style.Style0; - -/** - * Created by test203 on 2019/6/11. - */ - - -public class PushList { - //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; - - private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); - private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); - private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); - - //别名推送方式 - // static String Alias1 = ""; - // static String Alias2 = ""; - static String host = "http://sdk.open.api.igexin.com/apiex.htm"; - - public static void pushToUser(List cids, String title, String content){ - // 配置返回每个用户返回用户状态,可选 - System.setProperty("gexin_pushList_needDetails", "true"); - // 配置返回每个别名及其对应cid的用户状态,可选 - // System.setProperty("gexin_pushList_needAliasDetails", "true"); - IGtPush push = new IGtPush(host, appKey, masterSecret); - // 通知透传模板 - NotificationTemplate template = notificationTemplateDemo(title,content); - ListMessage message = new ListMessage(); - message.setData(template); - // 设置消息离线,并设置离线时间 - message.setOffline(true); - // 离线有效时间,单位为毫秒,可选 - message.setOfflineExpireTime(24 * 1000 * 3600); - // 配置推送目标 - List targets = new ArrayList(); - for (String cid : cids) { - Target target = new Target(); - target.setAppId(appId); - target.setClientId(cid); - targets.add(target); - } - - // taskId用于在推送时去查找对应的message - String taskId = push.getContentId(message); - IPushResult ret = push.pushMessageToList(taskId, targets); - System.out.println(ret.getResponse().toString()); - } - - public static NotificationTemplate notificationTemplateDemo(String title,String content) { - NotificationTemplate template = new NotificationTemplate(); - // 设置APPID与APPKEY - template.setAppId(appId); - template.setAppkey(appKey); - - Style0 style = new Style0(); - // 设置通知栏标题与内容 - style.setTitle(title); - style.setText(content); - // 配置通知栏图标 - style.setLogo("icon.png"); - // 配置通知栏网络图标 - style.setLogoUrl(""); - // 设置通知是否响铃,震动,或者可清除 - style.setRing(true); - style.setVibrate(true); - style.setClearable(true); - template.setStyle(style); - - // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 - template.setTransmissionType(2); - template.setTransmissionContent("请输入您要透传的内容"); - return template; - } -} - +//package org.well.well.util; +// +//import java.util.ArrayList; 
+//import java.util.List; +// +//import com.gexin.rp.sdk.base.IPushResult; +//import com.gexin.rp.sdk.base.impl.ListMessage; +//import com.gexin.rp.sdk.base.impl.Target; +//import com.gexin.rp.sdk.http.IGtPush; +//import com.gexin.rp.sdk.template.NotificationTemplate; +//import com.gexin.rp.sdk.template.style.Style0; +// +///** +// * Created by test203 on 2019/6/11. +// */ +// +// +//public class PushList { +// //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; +// +// private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); +// private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); +// private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); +// +// //别名推送方式 +// // static String Alias1 = ""; +// // static String Alias2 = ""; +// static String host = "http://sdk.open.api.igexin.com/apiex.htm"; +// +// public static void pushToUser(List cids, String title, String content){ +// // 配置返回每个用户返回用户状态,可选 +// System.setProperty("gexin_pushList_needDetails", "true"); +// // 配置返回每个别名及其对应cid的用户状态,可选 +// // System.setProperty("gexin_pushList_needAliasDetails", "true"); +// IGtPush push = new IGtPush(host, appKey, masterSecret); +// // 通知透传模板 +// NotificationTemplate template = notificationTemplateDemo(title,content); +// ListMessage message = new ListMessage(); +// message.setData(template); +// // 设置消息离线,并设置离线时间 +// message.setOffline(true); +// // 离线有效时间,单位为毫秒,可选 +// message.setOfflineExpireTime(24 * 1000 * 3600); +// // 配置推送目标 +// List targets = new ArrayList(); +// for (String cid : cids) { +// Target target = new Target(); +// target.setAppId(appId); +// target.setClientId(cid); +// targets.add(target); +// } +// +// // taskId用于在推送时去查找对应的message +// String taskId = push.getContentId(message); +// IPushResult ret = push.pushMessageToList(taskId, targets); +// System.out.println(ret.getResponse().toString()); +// } +// +// public static NotificationTemplate notificationTemplateDemo(String title,String content) { +// NotificationTemplate template = new NotificationTemplate(); +// // 设置APPID与APPKEY +// template.setAppId(appId); +// template.setAppkey(appKey); +// +// Style0 style = new Style0(); +// // 设置通知栏标题与内容 +// style.setTitle(title); +// style.setText(content); +// // 配置通知栏图标 +// style.setLogo("icon.png"); +// // 配置通知栏网络图标 +// style.setLogoUrl(""); +// // 设置通知是否响铃,震动,或者可清除 +// style.setRing(true); +// style.setVibrate(true); +// style.setClearable(true); +// template.setStyle(style); +// +// // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 +// template.setTransmissionType(2); +// template.setTransmissionContent("请输入您要透传的内容"); +// return template; +// } +//} +// diff --git a/src/main/java/org/well/well/util/ResponseResolver.java b/src/main/java/org/well/well/util/ResponseResolver.java index 6fef231..f73734e 100644 --- a/src/main/java/org/well/well/util/ResponseResolver.java +++ b/src/main/java/org/well/well/util/ResponseResolver.java @@ -20,14 +20,15 @@ static { abstractResponseHashMap.put(DeviceTypeEnum.Liquid.name(), new LiquidResponse()); abstractResponseHashMap.put(DeviceTypeEnum.Well.name(), new WellResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse());//集中器 abstractResponseHashMap.put(DeviceTypeEnum.Methane.name(), new MethaneResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.WasteGas.name(), new WasteGasResponse()); - 
abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse()); -// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse());//温湿度 +// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse());//噪音开挖 + abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse());//井盖定位 + abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse());//水质检测 } diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..4701fab --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,41 @@ +Manifest-Version: 1.0 +Main-Class: org.well.mysql.sink.WellSink +Class-Path: slf4j-log4j12-1.6.1.jar commons-codec-1.9.jar commons-pool2- + 2.4.2.jar javassist-3.18.1-GA.jar jcl-over-slf4j-1.7.7.jar jansi-1.11.j + ar hawtbuf-1.11.jar jackson-databind-2.4.2.jar mina-core-2.0.4.jar juni + t-4.10.jar log4j-1.2.16.jar dom4j-1.6.1.jar hamcrest-core-1.1.jar orika + -core-1.4.5.jar activemq-pool-5.14.5.jar activemq-client-5.14.5.jar spr + ing-beans-4.0.6.RELEASE.jar httpcore-4.2.1.jar jboss-transaction-api_1. + 2_spec-1.0.0.Final.jar commons-io-2.4.jar concurrentlinkedhashmap-lru-1 + .2_jdk5.jar commons-cli-1.2.jar commons-fileupload-1.3.1.jar jsr305-1.3 + .9.jar gson-2.2.2.jar jackson-jaxrs-json-provider-2.4.2.jar jetty-6.1.2 + 6.jar janino-2.7.6.jar commons-beanutils-1.9.2.jar hibernate-core-4.3.6 + .Final.jar guava-10.0.1.jar spring-context-support-4.0.6.RELEASE.jar lo + g4jdbc-remix-0.2.7.jar hibernate-jpa-2.1-api-1.0.0.Final.jar servlet-ap + i-2.5-20110124.jar geronimo-jms_1.1_spec-1.1.1.jar java-sizeof-0.0.4.ja + r aopalliance-1.0.jar jandex-1.1.0.Final.jar jboss-logging-3.1.3.GA.jar + antlr-2.7.7.jar validation-api-1.1.0.Final.jar geronimo-jta_1.0.1B_spe + c-1.0.1.jar jackson-jaxrs-base-2.4.2.jar logback-core-1.1.2.jar hiberna + te-validator-5.1.2.Final.jar logback-classic-1.1.2.jar avro-1.7.3.jar s + pring-tx-4.0.6.RELEASE.jar spring-orm-4.0.6.RELEASE.jar activemq-broker + -5.14.5.jar kafka-clients-2.4.0-hw-ei-312005.jar commons-pool-1.5.4.jar + ezmorph-1.0.6.jar netty-3.4.0.Final.jar avro-ipc-1.7.3.jar flume-ng-co + re-1.4.0.jar activemq-openwire-legacy-5.14.5.jar commons-compiler-2.7.6 + .jar jetty-util-6.1.26.jar xbean-spring-4.2.jar jackson-core-asl-1.9.3. 
+ jar jackson-mapper-asl-1.9.3.jar spring-core-4.0.6.RELEASE.jar geronimo + -j2ee-management_1.1_spec-1.0.1.jar ojdbc6-11.1.0.7.0.jar json-lib-2.4- + jdk15.jar activemq-jms-pool-5.14.5.jar jul-to-slf4j-1.7.7.jar velocity- + 1.7.jar joda-time-2.1.jar commons-httpclient-3.1.jar spring-context-4.0 + .6.RELEASE.jar commons-lang3-3.3.2.jar commons-lang-2.5.jar libthrift-0 + .7.0.jar jackson-core-2.4.2.jar hibernate-commons-annotations-4.0.5.Fin + al.jar commons-logging-1.1.1.jar fastjson-1.1.15.jar mysql-connector-ja + va-5.1.25.jar spring-aop-4.0.6.RELEASE.jar slf4j-api-1.7.7.jar httpclie + nt-4.2.1.jar flume-ng-configuration-1.4.0.jar jboss-logging-annotations + -1.2.0.Beta1.jar snappy-java-1.0.4.1.jar paranamer-2.3.jar flume-ng-sdk + -1.4.0.jar spring-webmvc-4.0.6.RELEASE.jar jackson-annotations-2.4.0.ja + r jackson-module-jaxb-annotations-2.4.2.jar commons-dbcp-1.4.jar spring + -jms-3.2.8.RELEASE.jar spring-expression-4.0.6.RELEASE.jar jstl-1.2.jar + xml-apis-1.0.b2.jar activemq-spring-5.14.5.jar spring-jdbc-4.0.6.RELEA + SE.jar classmate-1.0.0.jar commons-collections-3.2.1.jar spring-web-4.0 + .6.RELEASE.jar + diff --git a/src/main/resources/wellSensor/77042.jaas.conf b/src/main/resources/wellSensor/77042.jaas.conf new file mode 100644 index 0000000..3abd31a --- /dev/null +++ b/src/main/resources/wellSensor/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/wellSensor/application.properties b/src/main/resources/wellSensor/application.properties index 0c2d25c..a8970d1 100644 --- a/src/main/resources/wellSensor/application.properties +++ b/src/main/resources/wellSensor/application.properties @@ -20,9 +20,9 @@ #db.default.username=sensor #db.default.password=sensor db.default.driverClassName=com.mysql.jdbc.Driver -db.default.url=jdbc:mysql://192.168.0.166:3306/smartwell?useUnicode=true&characterEncoding=UTF-8&useSSL=false +db.default.url=jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?useUnicode=true&characterEncoding=UTF-8&useSSL=false db.default.username=root -db.default.password=root +db.default.password=casic203yz2db ### ============================================================================ # dbReal # ============================================================================ @@ -157,8 +157,8 @@ # ============================================================================ # ACTIVEMQ配置 # ============================================================================ -activemq_url = tcp://192.168.0.203:61616 -activemq_username = +activemq_url = tcp://127.0.0.1:61616 +activemq_username ="" activemq_password = @@ -166,6 +166,13 @@ # 告警、工单推送地址 # ============================================================================ #sendURL =http://111.198.10.15:11302/smartwell/job/updateSinkJob -sendURL =http://localhost:14537/job/updateSinkJob +sendURL 
=http://192.168.0.218:80/smartwell/job/updateSinkJob + +# ============================================================================ +# kafka大数据平台 + +bootstrapServer=192.168.65.14:21005,192.168.65.15:21005,192.168.65.16:21005 +alarmTopic=MSGQUEUE_8287 +dataTopic=TEMPSTORE_8204 diff --git a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml index a11375d..4b1a083 100644 --- a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml +++ b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml @@ -16,8 +16,7 @@ + userName="${activemq_username}"/> diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + org.apache.kafka + kafka-clients + ${kafka.version} + + + xml-apis + xml-apis + + + + + + xml-apis + xml-apis + 1.4.01 + + commons-beanutils @@ -209,7 +228,6 @@ 1.4.5 - org.slf4j @@ -262,5 +280,53 @@ spring-jms 3.2.8.RELEASE + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + \ No newline at end of file diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java index bca4fde..b363729 100644 --- a/src/main/java/org/well/mysql/sink/WellSink.java +++ b/src/main/java/org/well/mysql/sink/WellSink.java @@ -54,8 +54,6 @@ super.start(); LOG.info("--------wellMysqlSink start-------"); System.out.println("--------wellMysqlSink start-------"); - - } @Override @@ -125,7 +123,7 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}"; temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}"; - temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // 
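// Illustrative sketch (not part of the patch): the dataTopic/alarmTopic values added to
// application.properties above are resolved in the managers through TopicConstant keys,
// assuming Configure reads wellSensor/application.properties:
String dataTopic  = Configure.getProperty(TopicConstant.DATA_TOPIC, "");   // "TEMPSTORE_8204"
String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, "");  // "MSGQUEUE_8287"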
temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// 
temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = 
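// Illustrative usage sketch (not part of the patch): how the managers publish a standardized
// reading through Producer.send(). The device code, status JSON and timestamp below are made-up
// example values. Note that send() builds a new KafkaProducer from
// KafkaUtils.producerInitProperties(), sends synchronously via producer.send(record).get(),
// and closes the producer again on every call.
import com.alibaba.fastjson.JSON;
import org.well.well.core.util.StringUtils;
import org.well.well.kafka.Producer;
import org.well.well.kafka.StandardData;
import org.well.well.kafka.constant.DeviceTypeConstant;
import org.well.well.kafka.constant.TopicConstant;
import org.well.well.util.Configure;

public class ProducerSendExample {
    public static void main(String[] args) throws java.io.IOException {
        StandardData data = new StandardData("11201900001", DeviceTypeConstant.LIQUID,
                "[{\"Key\":\"Level\",\"Value\":\"1.19\"},{\"Key\":\"Power\",\"Value\":\"98\"}]",
                "20191213000000");
        String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, "");
        if (!StringUtils.isEmpty(dataTopic)) {           // no topic configured, skip publishing
            Producer.send(JSON.toJSONString(data), dataTopic);
        }
    }
}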
Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final 
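// Illustrative sketch (not part of the patch): what standardStatusFomate() produces for a
// liquid-level sample, mirroring the parameters LiquidManager passes. The sample JSON and cell
// value are made-up examples; the appendList parameter is assumed to be the
// List<Map<String, String>> that appendListBuilder() returns.
import net.sf.json.JSONObject;
import org.well.well.kafka.StandardDataUtils;
import java.util.List;
import java.util.Map;

public class StandardStatusExample {
    public static void main(String[] args) {
        JSONObject sample = JSONObject.fromObject("{\"level\":\"1.19\",\"uptime\":\"20191213000000\"}");
        List<Map<String, String>> appendList = StandardDataUtils.appendListBuilder("98", "Power");
        String status = StandardDataUtils.standardStatusFomate(
                sample, new String[]{"level"}, new String[]{"Level"}, appendList);
        // roughly: [{"Key":"Level","Value":"1.19"},{"Key":"Power","Value":"98"}]
        // (key order inside each object may vary, the entries are plain HashMaps)
        System.out.println(status);
    }
}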
String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + 
rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, 
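// Illustrative sketch (not part of the patch): KafkaProperties resolves a key by checking
// server.properties, then producer.properties, then consumer.properties, then client.properties
// under the hard-coded filePath, and finally falls back to the caller's default, e.g.:
String servers = KafkaProperties.getInstance().getValues("bootstrap.servers", "localhost:21007");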
"org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + 
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
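// Illustrative sketch (not part of the patch): SimplePartitioner above parses the record key as
// an integer and takes it modulo the partition count, falling back to partition 0 when the key
// is not numeric. Producer.send() publishes with an empty-string key, so with this partitioner
// every record currently lands on partition 0. Standalone illustration of the same arithmetic,
// assuming a 3-partition topic:
public class SimplePartitionerSketch {
    static int partitionFor(String key, int numPartitions) {
        try {
            return Integer.parseInt(key) % numPartitions;  // numeric keys spread across partitions
        } catch (NumberFormatException e) {
            return 0;                                      // empty or non-numeric keys go to partition 0
        }
    }

    public static void main(String[] args) {
        System.out.println(partitionFor("7", 3));  // 1
        System.out.println(partitionFor("", 3));   // 0 (the key Producer.send currently uses)
    }
}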
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/src/main/java/org/well/well/resp/WaterQualityResponse.java b/src/main/java/org/well/well/resp/WaterQualityResponse.java index 47d3674..87b18ea 100644 --- a/src/main/java/org/well/well/resp/WaterQualityResponse.java +++ b/src/main/java/org/well/well/resp/WaterQualityResponse.java @@ -9,10 +9,15 @@ import org.well.well.base.AbstractResponse; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; +import org.well.well.kafka.Producer; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; import org.well.well.manager.DeviceConfigManager; import org.well.well.manager.ImeiManager; import org.well.well.manager.LiquidManager; import org.well.well.manager.MeterManager; +import org.well.well.util.Configure; import java.util.HashMap; import java.util.Map; @@ -29,24 +34,38 @@ try { ClassPathXmlApplicationContext ac = this.getAc(); MeterManager meterManager = ac.getBean(MeterManager.class); + //直接推送 JSONObject json = JSONObject.fromObject(content); String devCode = json.get("DevID").toString(); + String dataTimeStr = json.get("LogTime").toString(); Map meterMap = new HashMap(); Map meterMapBefore = new HashMap(); - meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]","")); + meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]", "")); meterMap.put("CN", "2011"); + meterMap.put("LogTime", dataTimeStr); meterMap.put("devCode", devCode); -// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); +// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); JSONArray jsonArray = JSONArray.fromObject(json.get("Status")); for (int i = 0; i < jsonArray.size(); i++) { String name = ((JSONObject) jsonArray.get(i)).get("Key").toString(); String value = ((JSONObject) jsonArray.get(i)).get("Value").toString(); - meterMapBefore.put(name,value); + meterMapBefore.put(name, value); } + //包含ProviderData,且值为1,说明来自于外协厂家,直接进行大数据平台的转发 + if (json.containsKey("ProviderData")&&json.get("ProviderData").toString().equals("1")) { + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(content, dataTopic); + } + } + //水质的数据分别从大数据平台和来源第三方外协 + //由于大数据平台直接转发,没有kafkaDataFlag设置,在第三方转发的时候添加的false, + // 所以这里的逻辑和格式 和其他设备不一样 + meterMap.put(DeviceTypeConstant.KAFKA_DATA_FLAG, json.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG) ? 
"0" : "1"); meterMap.put("temp", meterMapBefore.get("Temp")); meterMap.put("PH", meterMapBefore.get("PH")); - meterMap.put("conductivity", meterMapBefore.get("Cond")); + meterMap.put("conductivity", meterMapBefore.get("Cond")); meterMap.put("DO", meterMapBefore.get("DO")); meterMap.put("turbidity", meterMapBefore.get("Turb")); meterMap.put("COD", meterMapBefore.get("COD")); @@ -54,7 +73,7 @@ meterMap.put("TP", meterMapBefore.get("TP")); meterMap.put("TN", meterMapBefore.get("TN")); meterManager.processMeterData(meterMap); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); } } diff --git a/src/main/java/org/well/well/util/PushList.java b/src/main/java/org/well/well/util/PushList.java index 74568fe..ba5e1af 100644 --- a/src/main/java/org/well/well/util/PushList.java +++ b/src/main/java/org/well/well/util/PushList.java @@ -1,85 +1,85 @@ -package org.well.well.util; - -import java.util.ArrayList; -import java.util.List; - -import com.gexin.rp.sdk.base.IPushResult; -import com.gexin.rp.sdk.base.impl.ListMessage; -import com.gexin.rp.sdk.base.impl.Target; -import com.gexin.rp.sdk.http.IGtPush; -import com.gexin.rp.sdk.template.NotificationTemplate; -import com.gexin.rp.sdk.template.style.Style0; - -/** - * Created by test203 on 2019/6/11. - */ - - -public class PushList { - //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; - - private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); - private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); - private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); - - //别名推送方式 - // static String Alias1 = ""; - // static String Alias2 = ""; - static String host = "http://sdk.open.api.igexin.com/apiex.htm"; - - public static void pushToUser(List cids, String title, String content){ - // 配置返回每个用户返回用户状态,可选 - System.setProperty("gexin_pushList_needDetails", "true"); - // 配置返回每个别名及其对应cid的用户状态,可选 - // System.setProperty("gexin_pushList_needAliasDetails", "true"); - IGtPush push = new IGtPush(host, appKey, masterSecret); - // 通知透传模板 - NotificationTemplate template = notificationTemplateDemo(title,content); - ListMessage message = new ListMessage(); - message.setData(template); - // 设置消息离线,并设置离线时间 - message.setOffline(true); - // 离线有效时间,单位为毫秒,可选 - message.setOfflineExpireTime(24 * 1000 * 3600); - // 配置推送目标 - List targets = new ArrayList(); - for (String cid : cids) { - Target target = new Target(); - target.setAppId(appId); - target.setClientId(cid); - targets.add(target); - } - - // taskId用于在推送时去查找对应的message - String taskId = push.getContentId(message); - IPushResult ret = push.pushMessageToList(taskId, targets); - System.out.println(ret.getResponse().toString()); - } - - public static NotificationTemplate notificationTemplateDemo(String title,String content) { - NotificationTemplate template = new NotificationTemplate(); - // 设置APPID与APPKEY - template.setAppId(appId); - template.setAppkey(appKey); - - Style0 style = new Style0(); - // 设置通知栏标题与内容 - style.setTitle(title); - style.setText(content); - // 配置通知栏图标 - style.setLogo("icon.png"); - // 配置通知栏网络图标 - style.setLogoUrl(""); - // 设置通知是否响铃,震动,或者可清除 - style.setRing(true); - style.setVibrate(true); - style.setClearable(true); - template.setStyle(style); - - // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 - template.setTransmissionType(2); - template.setTransmissionContent("请输入您要透传的内容"); - return template; - } -} - +//package org.well.well.util; +// +//import java.util.ArrayList; 
+//import java.util.List; +// +//import com.gexin.rp.sdk.base.IPushResult; +//import com.gexin.rp.sdk.base.impl.ListMessage; +//import com.gexin.rp.sdk.base.impl.Target; +//import com.gexin.rp.sdk.http.IGtPush; +//import com.gexin.rp.sdk.template.NotificationTemplate; +//import com.gexin.rp.sdk.template.style.Style0; +// +///** +// * Created by test203 on 2019/6/11. +// */ +// +// +//public class PushList { +// //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; +// +// private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); +// private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); +// private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); +// +// //别名推送方式 +// // static String Alias1 = ""; +// // static String Alias2 = ""; +// static String host = "http://sdk.open.api.igexin.com/apiex.htm"; +// +// public static void pushToUser(List cids, String title, String content){ +// // 配置返回每个用户返回用户状态,可选 +// System.setProperty("gexin_pushList_needDetails", "true"); +// // 配置返回每个别名及其对应cid的用户状态,可选 +// // System.setProperty("gexin_pushList_needAliasDetails", "true"); +// IGtPush push = new IGtPush(host, appKey, masterSecret); +// // 通知透传模板 +// NotificationTemplate template = notificationTemplateDemo(title,content); +// ListMessage message = new ListMessage(); +// message.setData(template); +// // 设置消息离线,并设置离线时间 +// message.setOffline(true); +// // 离线有效时间,单位为毫秒,可选 +// message.setOfflineExpireTime(24 * 1000 * 3600); +// // 配置推送目标 +// List targets = new ArrayList(); +// for (String cid : cids) { +// Target target = new Target(); +// target.setAppId(appId); +// target.setClientId(cid); +// targets.add(target); +// } +// +// // taskId用于在推送时去查找对应的message +// String taskId = push.getContentId(message); +// IPushResult ret = push.pushMessageToList(taskId, targets); +// System.out.println(ret.getResponse().toString()); +// } +// +// public static NotificationTemplate notificationTemplateDemo(String title,String content) { +// NotificationTemplate template = new NotificationTemplate(); +// // 设置APPID与APPKEY +// template.setAppId(appId); +// template.setAppkey(appKey); +// +// Style0 style = new Style0(); +// // 设置通知栏标题与内容 +// style.setTitle(title); +// style.setText(content); +// // 配置通知栏图标 +// style.setLogo("icon.png"); +// // 配置通知栏网络图标 +// style.setLogoUrl(""); +// // 设置通知是否响铃,震动,或者可清除 +// style.setRing(true); +// style.setVibrate(true); +// style.setClearable(true); +// template.setStyle(style); +// +// // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 +// template.setTransmissionType(2); +// template.setTransmissionContent("请输入您要透传的内容"); +// return template; +// } +//} +// diff --git a/src/main/java/org/well/well/util/ResponseResolver.java b/src/main/java/org/well/well/util/ResponseResolver.java index 6fef231..f73734e 100644 --- a/src/main/java/org/well/well/util/ResponseResolver.java +++ b/src/main/java/org/well/well/util/ResponseResolver.java @@ -20,14 +20,15 @@ static { abstractResponseHashMap.put(DeviceTypeEnum.Liquid.name(), new LiquidResponse()); abstractResponseHashMap.put(DeviceTypeEnum.Well.name(), new WellResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse());//集中器 abstractResponseHashMap.put(DeviceTypeEnum.Methane.name(), new MethaneResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.WasteGas.name(), new WasteGasResponse()); - 
abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse()); -// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse());//温湿度 +// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse());//噪音开挖 + abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse());//井盖定位 + abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse());//水质检测 } diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..4701fab --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,41 @@ +Manifest-Version: 1.0 +Main-Class: org.well.mysql.sink.WellSink +Class-Path: slf4j-log4j12-1.6.1.jar commons-codec-1.9.jar commons-pool2- + 2.4.2.jar javassist-3.18.1-GA.jar jcl-over-slf4j-1.7.7.jar jansi-1.11.j + ar hawtbuf-1.11.jar jackson-databind-2.4.2.jar mina-core-2.0.4.jar juni + t-4.10.jar log4j-1.2.16.jar dom4j-1.6.1.jar hamcrest-core-1.1.jar orika + -core-1.4.5.jar activemq-pool-5.14.5.jar activemq-client-5.14.5.jar spr + ing-beans-4.0.6.RELEASE.jar httpcore-4.2.1.jar jboss-transaction-api_1. + 2_spec-1.0.0.Final.jar commons-io-2.4.jar concurrentlinkedhashmap-lru-1 + .2_jdk5.jar commons-cli-1.2.jar commons-fileupload-1.3.1.jar jsr305-1.3 + .9.jar gson-2.2.2.jar jackson-jaxrs-json-provider-2.4.2.jar jetty-6.1.2 + 6.jar janino-2.7.6.jar commons-beanutils-1.9.2.jar hibernate-core-4.3.6 + .Final.jar guava-10.0.1.jar spring-context-support-4.0.6.RELEASE.jar lo + g4jdbc-remix-0.2.7.jar hibernate-jpa-2.1-api-1.0.0.Final.jar servlet-ap + i-2.5-20110124.jar geronimo-jms_1.1_spec-1.1.1.jar java-sizeof-0.0.4.ja + r aopalliance-1.0.jar jandex-1.1.0.Final.jar jboss-logging-3.1.3.GA.jar + antlr-2.7.7.jar validation-api-1.1.0.Final.jar geronimo-jta_1.0.1B_spe + c-1.0.1.jar jackson-jaxrs-base-2.4.2.jar logback-core-1.1.2.jar hiberna + te-validator-5.1.2.Final.jar logback-classic-1.1.2.jar avro-1.7.3.jar s + pring-tx-4.0.6.RELEASE.jar spring-orm-4.0.6.RELEASE.jar activemq-broker + -5.14.5.jar kafka-clients-2.4.0-hw-ei-312005.jar commons-pool-1.5.4.jar + ezmorph-1.0.6.jar netty-3.4.0.Final.jar avro-ipc-1.7.3.jar flume-ng-co + re-1.4.0.jar activemq-openwire-legacy-5.14.5.jar commons-compiler-2.7.6 + .jar jetty-util-6.1.26.jar xbean-spring-4.2.jar jackson-core-asl-1.9.3. 
+ jar jackson-mapper-asl-1.9.3.jar spring-core-4.0.6.RELEASE.jar geronimo + -j2ee-management_1.1_spec-1.0.1.jar ojdbc6-11.1.0.7.0.jar json-lib-2.4- + jdk15.jar activemq-jms-pool-5.14.5.jar jul-to-slf4j-1.7.7.jar velocity- + 1.7.jar joda-time-2.1.jar commons-httpclient-3.1.jar spring-context-4.0 + .6.RELEASE.jar commons-lang3-3.3.2.jar commons-lang-2.5.jar libthrift-0 + .7.0.jar jackson-core-2.4.2.jar hibernate-commons-annotations-4.0.5.Fin + al.jar commons-logging-1.1.1.jar fastjson-1.1.15.jar mysql-connector-ja + va-5.1.25.jar spring-aop-4.0.6.RELEASE.jar slf4j-api-1.7.7.jar httpclie + nt-4.2.1.jar flume-ng-configuration-1.4.0.jar jboss-logging-annotations + -1.2.0.Beta1.jar snappy-java-1.0.4.1.jar paranamer-2.3.jar flume-ng-sdk + -1.4.0.jar spring-webmvc-4.0.6.RELEASE.jar jackson-annotations-2.4.0.ja + r jackson-module-jaxb-annotations-2.4.2.jar commons-dbcp-1.4.jar spring + -jms-3.2.8.RELEASE.jar spring-expression-4.0.6.RELEASE.jar jstl-1.2.jar + xml-apis-1.0.b2.jar activemq-spring-5.14.5.jar spring-jdbc-4.0.6.RELEA + SE.jar classmate-1.0.0.jar commons-collections-3.2.1.jar spring-web-4.0 + .6.RELEASE.jar + diff --git a/src/main/resources/wellSensor/77042.jaas.conf b/src/main/resources/wellSensor/77042.jaas.conf new file mode 100644 index 0000000..3abd31a --- /dev/null +++ b/src/main/resources/wellSensor/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/wellSensor/application.properties b/src/main/resources/wellSensor/application.properties index 0c2d25c..a8970d1 100644 --- a/src/main/resources/wellSensor/application.properties +++ b/src/main/resources/wellSensor/application.properties @@ -20,9 +20,9 @@ #db.default.username=sensor #db.default.password=sensor db.default.driverClassName=com.mysql.jdbc.Driver -db.default.url=jdbc:mysql://192.168.0.166:3306/smartwell?useUnicode=true&characterEncoding=UTF-8&useSSL=false +db.default.url=jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?useUnicode=true&characterEncoding=UTF-8&useSSL=false db.default.username=root -db.default.password=root +db.default.password=casic203yz2db ### ============================================================================ # dbReal # ============================================================================ @@ -157,8 +157,8 @@ # ============================================================================ # ACTIVEMQ配置 # ============================================================================ -activemq_url = tcp://192.168.0.203:61616 -activemq_username = +activemq_url = tcp://127.0.0.1:61616 +activemq_username ="" activemq_password = @@ -166,6 +166,13 @@ # 告警、工单推送地址 # ============================================================================ #sendURL =http://111.198.10.15:11302/smartwell/job/updateSinkJob -sendURL =http://localhost:14537/job/updateSinkJob +sendURL 
=http://192.168.0.218:80/smartwell/job/updateSinkJob + +# ============================================================================ +# kafka大数据平台 + +bootstrapServer=192.168.65.14:21005,192.168.65.15:21005,192.168.65.16:21005 +alarmTopic=MSGQUEUE_8287 +dataTopic=TEMPSTORE_8204 diff --git a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml index a11375d..4b1a083 100644 --- a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml +++ b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml @@ -16,8 +16,7 @@ + userName="${activemq_username}"/> diff --git a/src/main/resources/wellSensor/consumer.properties b/src/main/resources/wellSensor/consumer.properties new file mode 100644 index 0000000..1451c84 --- /dev/null +++ b/src/main/resources/wellSensor/consumer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com +group.id = example-group1 +auto.commit.interval.ms = 60000 +sasl.kerberos.service.name = kafka diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + org.apache.kafka + kafka-clients + ${kafka.version} + + + xml-apis + xml-apis + + + + + + xml-apis + xml-apis + 1.4.01 + + commons-beanutils @@ -209,7 +228,6 @@ 1.4.5 - org.slf4j @@ -262,5 +280,53 @@ spring-jms 3.2.8.RELEASE + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + \ No newline at end of file diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java index bca4fde..b363729 100644 --- a/src/main/java/org/well/mysql/sink/WellSink.java +++ b/src/main/java/org/well/mysql/sink/WellSink.java @@ -54,8 +54,6 @@ super.start(); LOG.info("--------wellMysqlSink start-------"); System.out.println("--------wellMysqlSink start-------"); - - } @Override @@ -125,7 +123,7 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}"; temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}"; - temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // 
temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// 
temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = 
Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final 
String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + 
rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, 
"org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + 
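+ // Illustrative only: each Module (StormClient / KafkaClient / Client) gets one stanza in the
+ // generated jaas.conf. On an Oracle/Sun JDK the KafkaClient stanza would look roughly like:
+ //   KafkaClient {
+ //   com.sun.security.auth.module.Krb5LoginModule required
+ //   useKeyTab=true
+ //   keyTab="D:\\...\\user.keytab"
+ //   principal="kafkauser@HADOOP.COM"
+ //   useTicketCache=false
+ //   storeKey=true
+ //   debug=true;
+ //   };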
builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/src/main/java/org/well/well/resp/WaterQualityResponse.java b/src/main/java/org/well/well/resp/WaterQualityResponse.java index 47d3674..87b18ea 100644 --- a/src/main/java/org/well/well/resp/WaterQualityResponse.java +++ b/src/main/java/org/well/well/resp/WaterQualityResponse.java @@ -9,10 +9,15 @@ import org.well.well.base.AbstractResponse; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; +import org.well.well.kafka.Producer; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; import org.well.well.manager.DeviceConfigManager; import org.well.well.manager.ImeiManager; import org.well.well.manager.LiquidManager; import org.well.well.manager.MeterManager; +import org.well.well.util.Configure; import java.util.HashMap; import java.util.Map; @@ -29,24 +34,38 @@ try { ClassPathXmlApplicationContext ac = this.getAc(); MeterManager meterManager = ac.getBean(MeterManager.class); + //直接推送 JSONObject json = JSONObject.fromObject(content); String devCode = json.get("DevID").toString(); + String dataTimeStr = json.get("LogTime").toString(); Map meterMap = new HashMap(); Map meterMapBefore = new HashMap(); - meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]","")); + meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]", "")); meterMap.put("CN", "2011"); + meterMap.put("LogTime", dataTimeStr); meterMap.put("devCode", devCode); -// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); +// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); JSONArray jsonArray = JSONArray.fromObject(json.get("Status")); for (int i = 0; i < jsonArray.size(); i++) { String name = ((JSONObject) jsonArray.get(i)).get("Key").toString(); String value = ((JSONObject) jsonArray.get(i)).get("Value").toString(); - meterMapBefore.put(name,value); + meterMapBefore.put(name, value); } + //包含ProviderData,且值为1,说明来自于外协厂家,直接进行大数据平台的转发 + if (json.containsKey("ProviderData")&&json.get("ProviderData").toString().equals("1")) { + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(content, dataTopic); + } + } + //水质的数据分别从大数据平台和来源第三方外协 + //由于大数据平台直接转发,没有kafkaDataFlag设置,在第三方转发的时候添加的false, + // 所以这里的逻辑和格式 和其他设备不一样 + meterMap.put(DeviceTypeConstant.KAFKA_DATA_FLAG, json.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG) ? 
"0" : "1"); meterMap.put("temp", meterMapBefore.get("Temp")); meterMap.put("PH", meterMapBefore.get("PH")); - meterMap.put("conductivity", meterMapBefore.get("Cond")); + meterMap.put("conductivity", meterMapBefore.get("Cond")); meterMap.put("DO", meterMapBefore.get("DO")); meterMap.put("turbidity", meterMapBefore.get("Turb")); meterMap.put("COD", meterMapBefore.get("COD")); @@ -54,7 +73,7 @@ meterMap.put("TP", meterMapBefore.get("TP")); meterMap.put("TN", meterMapBefore.get("TN")); meterManager.processMeterData(meterMap); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); } } diff --git a/src/main/java/org/well/well/util/PushList.java b/src/main/java/org/well/well/util/PushList.java index 74568fe..ba5e1af 100644 --- a/src/main/java/org/well/well/util/PushList.java +++ b/src/main/java/org/well/well/util/PushList.java @@ -1,85 +1,85 @@ -package org.well.well.util; - -import java.util.ArrayList; -import java.util.List; - -import com.gexin.rp.sdk.base.IPushResult; -import com.gexin.rp.sdk.base.impl.ListMessage; -import com.gexin.rp.sdk.base.impl.Target; -import com.gexin.rp.sdk.http.IGtPush; -import com.gexin.rp.sdk.template.NotificationTemplate; -import com.gexin.rp.sdk.template.style.Style0; - -/** - * Created by test203 on 2019/6/11. - */ - - -public class PushList { - //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; - - private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); - private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); - private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); - - //别名推送方式 - // static String Alias1 = ""; - // static String Alias2 = ""; - static String host = "http://sdk.open.api.igexin.com/apiex.htm"; - - public static void pushToUser(List cids, String title, String content){ - // 配置返回每个用户返回用户状态,可选 - System.setProperty("gexin_pushList_needDetails", "true"); - // 配置返回每个别名及其对应cid的用户状态,可选 - // System.setProperty("gexin_pushList_needAliasDetails", "true"); - IGtPush push = new IGtPush(host, appKey, masterSecret); - // 通知透传模板 - NotificationTemplate template = notificationTemplateDemo(title,content); - ListMessage message = new ListMessage(); - message.setData(template); - // 设置消息离线,并设置离线时间 - message.setOffline(true); - // 离线有效时间,单位为毫秒,可选 - message.setOfflineExpireTime(24 * 1000 * 3600); - // 配置推送目标 - List targets = new ArrayList(); - for (String cid : cids) { - Target target = new Target(); - target.setAppId(appId); - target.setClientId(cid); - targets.add(target); - } - - // taskId用于在推送时去查找对应的message - String taskId = push.getContentId(message); - IPushResult ret = push.pushMessageToList(taskId, targets); - System.out.println(ret.getResponse().toString()); - } - - public static NotificationTemplate notificationTemplateDemo(String title,String content) { - NotificationTemplate template = new NotificationTemplate(); - // 设置APPID与APPKEY - template.setAppId(appId); - template.setAppkey(appKey); - - Style0 style = new Style0(); - // 设置通知栏标题与内容 - style.setTitle(title); - style.setText(content); - // 配置通知栏图标 - style.setLogo("icon.png"); - // 配置通知栏网络图标 - style.setLogoUrl(""); - // 设置通知是否响铃,震动,或者可清除 - style.setRing(true); - style.setVibrate(true); - style.setClearable(true); - template.setStyle(style); - - // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 - template.setTransmissionType(2); - template.setTransmissionContent("请输入您要透传的内容"); - return template; - } -} - +//package org.well.well.util; +// +//import java.util.ArrayList; 
+//import java.util.List; +// +//import com.gexin.rp.sdk.base.IPushResult; +//import com.gexin.rp.sdk.base.impl.ListMessage; +//import com.gexin.rp.sdk.base.impl.Target; +//import com.gexin.rp.sdk.http.IGtPush; +//import com.gexin.rp.sdk.template.NotificationTemplate; +//import com.gexin.rp.sdk.template.style.Style0; +// +///** +// * Created by test203 on 2019/6/11. +// */ +// +// +//public class PushList { +// //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; +// +// private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); +// private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); +// private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); +// +// //别名推送方式 +// // static String Alias1 = ""; +// // static String Alias2 = ""; +// static String host = "http://sdk.open.api.igexin.com/apiex.htm"; +// +// public static void pushToUser(List cids, String title, String content){ +// // 配置返回每个用户返回用户状态,可选 +// System.setProperty("gexin_pushList_needDetails", "true"); +// // 配置返回每个别名及其对应cid的用户状态,可选 +// // System.setProperty("gexin_pushList_needAliasDetails", "true"); +// IGtPush push = new IGtPush(host, appKey, masterSecret); +// // 通知透传模板 +// NotificationTemplate template = notificationTemplateDemo(title,content); +// ListMessage message = new ListMessage(); +// message.setData(template); +// // 设置消息离线,并设置离线时间 +// message.setOffline(true); +// // 离线有效时间,单位为毫秒,可选 +// message.setOfflineExpireTime(24 * 1000 * 3600); +// // 配置推送目标 +// List targets = new ArrayList(); +// for (String cid : cids) { +// Target target = new Target(); +// target.setAppId(appId); +// target.setClientId(cid); +// targets.add(target); +// } +// +// // taskId用于在推送时去查找对应的message +// String taskId = push.getContentId(message); +// IPushResult ret = push.pushMessageToList(taskId, targets); +// System.out.println(ret.getResponse().toString()); +// } +// +// public static NotificationTemplate notificationTemplateDemo(String title,String content) { +// NotificationTemplate template = new NotificationTemplate(); +// // 设置APPID与APPKEY +// template.setAppId(appId); +// template.setAppkey(appKey); +// +// Style0 style = new Style0(); +// // 设置通知栏标题与内容 +// style.setTitle(title); +// style.setText(content); +// // 配置通知栏图标 +// style.setLogo("icon.png"); +// // 配置通知栏网络图标 +// style.setLogoUrl(""); +// // 设置通知是否响铃,震动,或者可清除 +// style.setRing(true); +// style.setVibrate(true); +// style.setClearable(true); +// template.setStyle(style); +// +// // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 +// template.setTransmissionType(2); +// template.setTransmissionContent("请输入您要透传的内容"); +// return template; +// } +//} +// diff --git a/src/main/java/org/well/well/util/ResponseResolver.java b/src/main/java/org/well/well/util/ResponseResolver.java index 6fef231..f73734e 100644 --- a/src/main/java/org/well/well/util/ResponseResolver.java +++ b/src/main/java/org/well/well/util/ResponseResolver.java @@ -20,14 +20,15 @@ static { abstractResponseHashMap.put(DeviceTypeEnum.Liquid.name(), new LiquidResponse()); abstractResponseHashMap.put(DeviceTypeEnum.Well.name(), new WellResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse());//集中器 abstractResponseHashMap.put(DeviceTypeEnum.Methane.name(), new MethaneResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.WasteGas.name(), new WasteGasResponse()); - 
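Editor's note on the ResponseResolver hunk below: it registers one handler instance per DeviceTypeEnum name. The makeResponse(...) body is not part of this hunk, so the lookup below is only a sketch of how such a registry is typically consulted (devType read from the incoming JSON, then a map get), under the assumption that the static map shown is the backing store:

    import net.sf.json.JSONObject;
    import org.well.well.base.AbstractResponse;

    public class ResolverSketch {
        public static AbstractResponse resolve(java.util.Map<String, AbstractResponse> registry, String message) {
            JSONObject json = JSONObject.fromObject(message);
            String devType = json.containsKey("devType") ? json.get("devType").toString() : "";
            AbstractResponse handler = registry.get(devType);    // e.g. "Liquid" -> LiquidResponse
            if (handler == null) {
                throw new IllegalArgumentException("No response handler registered for devType: " + devType);
            }
            return handler;
        }
    }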
abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse()); -// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse());//温湿度 +// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse());//噪音开挖 + abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse());//井盖定位 + abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse());//水质检测 } diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..4701fab --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,41 @@ +Manifest-Version: 1.0 +Main-Class: org.well.mysql.sink.WellSink +Class-Path: slf4j-log4j12-1.6.1.jar commons-codec-1.9.jar commons-pool2- + 2.4.2.jar javassist-3.18.1-GA.jar jcl-over-slf4j-1.7.7.jar jansi-1.11.j + ar hawtbuf-1.11.jar jackson-databind-2.4.2.jar mina-core-2.0.4.jar juni + t-4.10.jar log4j-1.2.16.jar dom4j-1.6.1.jar hamcrest-core-1.1.jar orika + -core-1.4.5.jar activemq-pool-5.14.5.jar activemq-client-5.14.5.jar spr + ing-beans-4.0.6.RELEASE.jar httpcore-4.2.1.jar jboss-transaction-api_1. + 2_spec-1.0.0.Final.jar commons-io-2.4.jar concurrentlinkedhashmap-lru-1 + .2_jdk5.jar commons-cli-1.2.jar commons-fileupload-1.3.1.jar jsr305-1.3 + .9.jar gson-2.2.2.jar jackson-jaxrs-json-provider-2.4.2.jar jetty-6.1.2 + 6.jar janino-2.7.6.jar commons-beanutils-1.9.2.jar hibernate-core-4.3.6 + .Final.jar guava-10.0.1.jar spring-context-support-4.0.6.RELEASE.jar lo + g4jdbc-remix-0.2.7.jar hibernate-jpa-2.1-api-1.0.0.Final.jar servlet-ap + i-2.5-20110124.jar geronimo-jms_1.1_spec-1.1.1.jar java-sizeof-0.0.4.ja + r aopalliance-1.0.jar jandex-1.1.0.Final.jar jboss-logging-3.1.3.GA.jar + antlr-2.7.7.jar validation-api-1.1.0.Final.jar geronimo-jta_1.0.1B_spe + c-1.0.1.jar jackson-jaxrs-base-2.4.2.jar logback-core-1.1.2.jar hiberna + te-validator-5.1.2.Final.jar logback-classic-1.1.2.jar avro-1.7.3.jar s + pring-tx-4.0.6.RELEASE.jar spring-orm-4.0.6.RELEASE.jar activemq-broker + -5.14.5.jar kafka-clients-2.4.0-hw-ei-312005.jar commons-pool-1.5.4.jar + ezmorph-1.0.6.jar netty-3.4.0.Final.jar avro-ipc-1.7.3.jar flume-ng-co + re-1.4.0.jar activemq-openwire-legacy-5.14.5.jar commons-compiler-2.7.6 + .jar jetty-util-6.1.26.jar xbean-spring-4.2.jar jackson-core-asl-1.9.3. 
+ jar jackson-mapper-asl-1.9.3.jar spring-core-4.0.6.RELEASE.jar geronimo + -j2ee-management_1.1_spec-1.0.1.jar ojdbc6-11.1.0.7.0.jar json-lib-2.4- + jdk15.jar activemq-jms-pool-5.14.5.jar jul-to-slf4j-1.7.7.jar velocity- + 1.7.jar joda-time-2.1.jar commons-httpclient-3.1.jar spring-context-4.0 + .6.RELEASE.jar commons-lang3-3.3.2.jar commons-lang-2.5.jar libthrift-0 + .7.0.jar jackson-core-2.4.2.jar hibernate-commons-annotations-4.0.5.Fin + al.jar commons-logging-1.1.1.jar fastjson-1.1.15.jar mysql-connector-ja + va-5.1.25.jar spring-aop-4.0.6.RELEASE.jar slf4j-api-1.7.7.jar httpclie + nt-4.2.1.jar flume-ng-configuration-1.4.0.jar jboss-logging-annotations + -1.2.0.Beta1.jar snappy-java-1.0.4.1.jar paranamer-2.3.jar flume-ng-sdk + -1.4.0.jar spring-webmvc-4.0.6.RELEASE.jar jackson-annotations-2.4.0.ja + r jackson-module-jaxb-annotations-2.4.2.jar commons-dbcp-1.4.jar spring + -jms-3.2.8.RELEASE.jar spring-expression-4.0.6.RELEASE.jar jstl-1.2.jar + xml-apis-1.0.b2.jar activemq-spring-5.14.5.jar spring-jdbc-4.0.6.RELEA + SE.jar classmate-1.0.0.jar commons-collections-3.2.1.jar spring-web-4.0 + .6.RELEASE.jar + diff --git a/src/main/resources/wellSensor/77042.jaas.conf b/src/main/resources/wellSensor/77042.jaas.conf new file mode 100644 index 0000000..3abd31a --- /dev/null +++ b/src/main/resources/wellSensor/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/wellSensor/application.properties b/src/main/resources/wellSensor/application.properties index 0c2d25c..a8970d1 100644 --- a/src/main/resources/wellSensor/application.properties +++ b/src/main/resources/wellSensor/application.properties @@ -20,9 +20,9 @@ #db.default.username=sensor #db.default.password=sensor db.default.driverClassName=com.mysql.jdbc.Driver -db.default.url=jdbc:mysql://192.168.0.166:3306/smartwell?useUnicode=true&characterEncoding=UTF-8&useSSL=false +db.default.url=jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?useUnicode=true&characterEncoding=UTF-8&useSSL=false db.default.username=root -db.default.password=root +db.default.password=casic203yz2db ### ============================================================================ # dbReal # ============================================================================ @@ -157,8 +157,8 @@ # ============================================================================ # ACTIVEMQ配置 # ============================================================================ -activemq_url = tcp://192.168.0.203:61616 -activemq_username = +activemq_url = tcp://127.0.0.1:61616 +activemq_username ="" activemq_password = @@ -166,6 +166,13 @@ # 告警、工单推送地址 # ============================================================================ #sendURL =http://111.198.10.15:11302/smartwell/job/updateSinkJob -sendURL =http://localhost:14537/job/updateSinkJob +sendURL 
=http://192.168.0.218:80/smartwell/job/updateSinkJob + +# ============================================================================ +# kafka大数据平台 + +bootstrapServer=192.168.65.14:21005,192.168.65.15:21005,192.168.65.16:21005 +alarmTopic=MSGQUEUE_8287 +dataTopic=TEMPSTORE_8204 diff --git a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml index a11375d..4b1a083 100644 --- a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml +++ b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml @@ -16,8 +16,7 @@ + userName="${activemq_username}"/> diff --git a/src/main/resources/wellSensor/consumer.properties b/src/main/resources/wellSensor/consumer.properties new file mode 100644 index 0000000..1451c84 --- /dev/null +++ b/src/main/resources/wellSensor/consumer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com +group.id = example-group1 +auto.commit.interval.ms = 60000 +sasl.kerberos.service.name = kafka diff --git a/src/main/resources/wellSensor/kafkaSecurityMode b/src/main/resources/wellSensor/kafkaSecurityMode new file mode 100644 index 0000000..ed59a5e --- /dev/null +++ b/src/main/resources/wellSensor/kafkaSecurityMode @@ -0,0 +1 @@ +kafka.client.security.mode = yes diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + org.apache.kafka + kafka-clients + ${kafka.version} + + + xml-apis + xml-apis + + + + + + xml-apis + xml-apis + 1.4.01 + + commons-beanutils @@ -209,7 +228,6 @@ 1.4.5 - org.slf4j @@ -262,5 +280,53 @@ spring-jms 3.2.8.RELEASE + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + \ No newline at end of file diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java index bca4fde..b363729 100644 --- a/src/main/java/org/well/mysql/sink/WellSink.java +++ b/src/main/java/org/well/mysql/sink/WellSink.java @@ -54,8 +54,6 @@ super.start(); LOG.info("--------wellMysqlSink start-------"); System.out.println("--------wellMysqlSink start-------"); - - } @Override @@ -125,7 +123,7 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}"; temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}"; - temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; +// 
temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// 
temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// 
temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; 
+ } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new 
HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + 
consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = 
"user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + 
this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. + */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new 
IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/src/main/java/org/well/well/resp/WaterQualityResponse.java b/src/main/java/org/well/well/resp/WaterQualityResponse.java index 47d3674..87b18ea 100644 --- a/src/main/java/org/well/well/resp/WaterQualityResponse.java +++ b/src/main/java/org/well/well/resp/WaterQualityResponse.java @@ -9,10 +9,15 @@ import org.well.well.base.AbstractResponse; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; +import org.well.well.kafka.Producer; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; import org.well.well.manager.DeviceConfigManager; import org.well.well.manager.ImeiManager; import org.well.well.manager.LiquidManager; import org.well.well.manager.MeterManager; +import org.well.well.util.Configure; import java.util.HashMap; import java.util.Map; @@ -29,24 +34,38 @@ try { ClassPathXmlApplicationContext ac = this.getAc(); MeterManager meterManager = ac.getBean(MeterManager.class); + //直接推送 JSONObject json = JSONObject.fromObject(content); String devCode = json.get("DevID").toString(); + String dataTimeStr = json.get("LogTime").toString(); Map meterMap = new HashMap(); Map meterMapBefore = new HashMap(); - meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]","")); + meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]", "")); meterMap.put("CN", "2011"); + meterMap.put("LogTime", dataTimeStr); meterMap.put("devCode", devCode); -// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); +// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); JSONArray jsonArray = JSONArray.fromObject(json.get("Status")); for (int i = 0; i < jsonArray.size(); i++) { String name = ((JSONObject) jsonArray.get(i)).get("Key").toString(); String value = ((JSONObject) jsonArray.get(i)).get("Value").toString(); - meterMapBefore.put(name,value); + meterMapBefore.put(name, value); } + //包含ProviderData,且值为1,说明来自于外协厂家,直接进行大数据平台的转发 + if (json.containsKey("ProviderData")&&json.get("ProviderData").toString().equals("1")) { + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(content, dataTopic); + } + } + //水质的数据分别从大数据平台和来源第三方外协 + //由于大数据平台直接转发,没有kafkaDataFlag设置,在第三方转发的时候添加的false, + // 所以这里的逻辑和格式 和其他设备不一样 + meterMap.put(DeviceTypeConstant.KAFKA_DATA_FLAG, json.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG) ? 
"0" : "1"); meterMap.put("temp", meterMapBefore.get("Temp")); meterMap.put("PH", meterMapBefore.get("PH")); - meterMap.put("conductivity", meterMapBefore.get("Cond")); + meterMap.put("conductivity", meterMapBefore.get("Cond")); meterMap.put("DO", meterMapBefore.get("DO")); meterMap.put("turbidity", meterMapBefore.get("Turb")); meterMap.put("COD", meterMapBefore.get("COD")); @@ -54,7 +73,7 @@ meterMap.put("TP", meterMapBefore.get("TP")); meterMap.put("TN", meterMapBefore.get("TN")); meterManager.processMeterData(meterMap); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); } } diff --git a/src/main/java/org/well/well/util/PushList.java b/src/main/java/org/well/well/util/PushList.java index 74568fe..ba5e1af 100644 --- a/src/main/java/org/well/well/util/PushList.java +++ b/src/main/java/org/well/well/util/PushList.java @@ -1,85 +1,85 @@ -package org.well.well.util; - -import java.util.ArrayList; -import java.util.List; - -import com.gexin.rp.sdk.base.IPushResult; -import com.gexin.rp.sdk.base.impl.ListMessage; -import com.gexin.rp.sdk.base.impl.Target; -import com.gexin.rp.sdk.http.IGtPush; -import com.gexin.rp.sdk.template.NotificationTemplate; -import com.gexin.rp.sdk.template.style.Style0; - -/** - * Created by test203 on 2019/6/11. - */ - - -public class PushList { - //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; - - private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); - private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); - private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); - - //别名推送方式 - // static String Alias1 = ""; - // static String Alias2 = ""; - static String host = "http://sdk.open.api.igexin.com/apiex.htm"; - - public static void pushToUser(List cids, String title, String content){ - // 配置返回每个用户返回用户状态,可选 - System.setProperty("gexin_pushList_needDetails", "true"); - // 配置返回每个别名及其对应cid的用户状态,可选 - // System.setProperty("gexin_pushList_needAliasDetails", "true"); - IGtPush push = new IGtPush(host, appKey, masterSecret); - // 通知透传模板 - NotificationTemplate template = notificationTemplateDemo(title,content); - ListMessage message = new ListMessage(); - message.setData(template); - // 设置消息离线,并设置离线时间 - message.setOffline(true); - // 离线有效时间,单位为毫秒,可选 - message.setOfflineExpireTime(24 * 1000 * 3600); - // 配置推送目标 - List targets = new ArrayList(); - for (String cid : cids) { - Target target = new Target(); - target.setAppId(appId); - target.setClientId(cid); - targets.add(target); - } - - // taskId用于在推送时去查找对应的message - String taskId = push.getContentId(message); - IPushResult ret = push.pushMessageToList(taskId, targets); - System.out.println(ret.getResponse().toString()); - } - - public static NotificationTemplate notificationTemplateDemo(String title,String content) { - NotificationTemplate template = new NotificationTemplate(); - // 设置APPID与APPKEY - template.setAppId(appId); - template.setAppkey(appKey); - - Style0 style = new Style0(); - // 设置通知栏标题与内容 - style.setTitle(title); - style.setText(content); - // 配置通知栏图标 - style.setLogo("icon.png"); - // 配置通知栏网络图标 - style.setLogoUrl(""); - // 设置通知是否响铃,震动,或者可清除 - style.setRing(true); - style.setVibrate(true); - style.setClearable(true); - template.setStyle(style); - - // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 - template.setTransmissionType(2); - template.setTransmissionContent("请输入您要透传的内容"); - return template; - } -} - +//package org.well.well.util; +// +//import java.util.ArrayList; 
+//import java.util.List; +// +//import com.gexin.rp.sdk.base.IPushResult; +//import com.gexin.rp.sdk.base.impl.ListMessage; +//import com.gexin.rp.sdk.base.impl.Target; +//import com.gexin.rp.sdk.http.IGtPush; +//import com.gexin.rp.sdk.template.NotificationTemplate; +//import com.gexin.rp.sdk.template.style.Style0; +// +///** +// * Created by test203 on 2019/6/11. +// */ +// +// +//public class PushList { +// //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; +// +// private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); +// private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); +// private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); +// +// //别名推送方式 +// // static String Alias1 = ""; +// // static String Alias2 = ""; +// static String host = "http://sdk.open.api.igexin.com/apiex.htm"; +// +// public static void pushToUser(List cids, String title, String content){ +// // 配置返回每个用户返回用户状态,可选 +// System.setProperty("gexin_pushList_needDetails", "true"); +// // 配置返回每个别名及其对应cid的用户状态,可选 +// // System.setProperty("gexin_pushList_needAliasDetails", "true"); +// IGtPush push = new IGtPush(host, appKey, masterSecret); +// // 通知透传模板 +// NotificationTemplate template = notificationTemplateDemo(title,content); +// ListMessage message = new ListMessage(); +// message.setData(template); +// // 设置消息离线,并设置离线时间 +// message.setOffline(true); +// // 离线有效时间,单位为毫秒,可选 +// message.setOfflineExpireTime(24 * 1000 * 3600); +// // 配置推送目标 +// List targets = new ArrayList(); +// for (String cid : cids) { +// Target target = new Target(); +// target.setAppId(appId); +// target.setClientId(cid); +// targets.add(target); +// } +// +// // taskId用于在推送时去查找对应的message +// String taskId = push.getContentId(message); +// IPushResult ret = push.pushMessageToList(taskId, targets); +// System.out.println(ret.getResponse().toString()); +// } +// +// public static NotificationTemplate notificationTemplateDemo(String title,String content) { +// NotificationTemplate template = new NotificationTemplate(); +// // 设置APPID与APPKEY +// template.setAppId(appId); +// template.setAppkey(appKey); +// +// Style0 style = new Style0(); +// // 设置通知栏标题与内容 +// style.setTitle(title); +// style.setText(content); +// // 配置通知栏图标 +// style.setLogo("icon.png"); +// // 配置通知栏网络图标 +// style.setLogoUrl(""); +// // 设置通知是否响铃,震动,或者可清除 +// style.setRing(true); +// style.setVibrate(true); +// style.setClearable(true); +// template.setStyle(style); +// +// // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 +// template.setTransmissionType(2); +// template.setTransmissionContent("请输入您要透传的内容"); +// return template; +// } +//} +// diff --git a/src/main/java/org/well/well/util/ResponseResolver.java b/src/main/java/org/well/well/util/ResponseResolver.java index 6fef231..f73734e 100644 --- a/src/main/java/org/well/well/util/ResponseResolver.java +++ b/src/main/java/org/well/well/util/ResponseResolver.java @@ -20,14 +20,15 @@ static { abstractResponseHashMap.put(DeviceTypeEnum.Liquid.name(), new LiquidResponse()); abstractResponseHashMap.put(DeviceTypeEnum.Well.name(), new WellResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse());//集中器 abstractResponseHashMap.put(DeviceTypeEnum.Methane.name(), new MethaneResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.WasteGas.name(), new WasteGasResponse()); - 
abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse()); -// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse());//温湿度 +// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse());//噪音开挖 + abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse());//井盖定位 + abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse());//水质检测 } diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..4701fab --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,41 @@ +Manifest-Version: 1.0 +Main-Class: org.well.mysql.sink.WellSink +Class-Path: slf4j-log4j12-1.6.1.jar commons-codec-1.9.jar commons-pool2- + 2.4.2.jar javassist-3.18.1-GA.jar jcl-over-slf4j-1.7.7.jar jansi-1.11.j + ar hawtbuf-1.11.jar jackson-databind-2.4.2.jar mina-core-2.0.4.jar juni + t-4.10.jar log4j-1.2.16.jar dom4j-1.6.1.jar hamcrest-core-1.1.jar orika + -core-1.4.5.jar activemq-pool-5.14.5.jar activemq-client-5.14.5.jar spr + ing-beans-4.0.6.RELEASE.jar httpcore-4.2.1.jar jboss-transaction-api_1. + 2_spec-1.0.0.Final.jar commons-io-2.4.jar concurrentlinkedhashmap-lru-1 + .2_jdk5.jar commons-cli-1.2.jar commons-fileupload-1.3.1.jar jsr305-1.3 + .9.jar gson-2.2.2.jar jackson-jaxrs-json-provider-2.4.2.jar jetty-6.1.2 + 6.jar janino-2.7.6.jar commons-beanutils-1.9.2.jar hibernate-core-4.3.6 + .Final.jar guava-10.0.1.jar spring-context-support-4.0.6.RELEASE.jar lo + g4jdbc-remix-0.2.7.jar hibernate-jpa-2.1-api-1.0.0.Final.jar servlet-ap + i-2.5-20110124.jar geronimo-jms_1.1_spec-1.1.1.jar java-sizeof-0.0.4.ja + r aopalliance-1.0.jar jandex-1.1.0.Final.jar jboss-logging-3.1.3.GA.jar + antlr-2.7.7.jar validation-api-1.1.0.Final.jar geronimo-jta_1.0.1B_spe + c-1.0.1.jar jackson-jaxrs-base-2.4.2.jar logback-core-1.1.2.jar hiberna + te-validator-5.1.2.Final.jar logback-classic-1.1.2.jar avro-1.7.3.jar s + pring-tx-4.0.6.RELEASE.jar spring-orm-4.0.6.RELEASE.jar activemq-broker + -5.14.5.jar kafka-clients-2.4.0-hw-ei-312005.jar commons-pool-1.5.4.jar + ezmorph-1.0.6.jar netty-3.4.0.Final.jar avro-ipc-1.7.3.jar flume-ng-co + re-1.4.0.jar activemq-openwire-legacy-5.14.5.jar commons-compiler-2.7.6 + .jar jetty-util-6.1.26.jar xbean-spring-4.2.jar jackson-core-asl-1.9.3. 
+ jar jackson-mapper-asl-1.9.3.jar spring-core-4.0.6.RELEASE.jar geronimo + -j2ee-management_1.1_spec-1.0.1.jar ojdbc6-11.1.0.7.0.jar json-lib-2.4- + jdk15.jar activemq-jms-pool-5.14.5.jar jul-to-slf4j-1.7.7.jar velocity- + 1.7.jar joda-time-2.1.jar commons-httpclient-3.1.jar spring-context-4.0 + .6.RELEASE.jar commons-lang3-3.3.2.jar commons-lang-2.5.jar libthrift-0 + .7.0.jar jackson-core-2.4.2.jar hibernate-commons-annotations-4.0.5.Fin + al.jar commons-logging-1.1.1.jar fastjson-1.1.15.jar mysql-connector-ja + va-5.1.25.jar spring-aop-4.0.6.RELEASE.jar slf4j-api-1.7.7.jar httpclie + nt-4.2.1.jar flume-ng-configuration-1.4.0.jar jboss-logging-annotations + -1.2.0.Beta1.jar snappy-java-1.0.4.1.jar paranamer-2.3.jar flume-ng-sdk + -1.4.0.jar spring-webmvc-4.0.6.RELEASE.jar jackson-annotations-2.4.0.ja + r jackson-module-jaxb-annotations-2.4.2.jar commons-dbcp-1.4.jar spring + -jms-3.2.8.RELEASE.jar spring-expression-4.0.6.RELEASE.jar jstl-1.2.jar + xml-apis-1.0.b2.jar activemq-spring-5.14.5.jar spring-jdbc-4.0.6.RELEA + SE.jar classmate-1.0.0.jar commons-collections-3.2.1.jar spring-web-4.0 + .6.RELEASE.jar + diff --git a/src/main/resources/wellSensor/77042.jaas.conf b/src/main/resources/wellSensor/77042.jaas.conf new file mode 100644 index 0000000..3abd31a --- /dev/null +++ b/src/main/resources/wellSensor/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/wellSensor/application.properties b/src/main/resources/wellSensor/application.properties index 0c2d25c..a8970d1 100644 --- a/src/main/resources/wellSensor/application.properties +++ b/src/main/resources/wellSensor/application.properties @@ -20,9 +20,9 @@ #db.default.username=sensor #db.default.password=sensor db.default.driverClassName=com.mysql.jdbc.Driver -db.default.url=jdbc:mysql://192.168.0.166:3306/smartwell?useUnicode=true&characterEncoding=UTF-8&useSSL=false +db.default.url=jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?useUnicode=true&characterEncoding=UTF-8&useSSL=false db.default.username=root -db.default.password=root +db.default.password=casic203yz2db ### ============================================================================ # dbReal # ============================================================================ @@ -157,8 +157,8 @@ # ============================================================================ # ACTIVEMQ配置 # ============================================================================ -activemq_url = tcp://192.168.0.203:61616 -activemq_username = +activemq_url = tcp://127.0.0.1:61616 +activemq_username ="" activemq_password = @@ -166,6 +166,13 @@ # 告警、工单推送地址 # ============================================================================ #sendURL =http://111.198.10.15:11302/smartwell/job/updateSinkJob -sendURL =http://localhost:14537/job/updateSinkJob +sendURL 
=http://192.168.0.218:80/smartwell/job/updateSinkJob + +# ============================================================================ +# kafka大数据平台 + +bootstrapServer=192.168.65.14:21005,192.168.65.15:21005,192.168.65.16:21005 +alarmTopic=MSGQUEUE_8287 +dataTopic=TEMPSTORE_8204 diff --git a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml index a11375d..4b1a083 100644 --- a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml +++ b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml @@ -16,8 +16,7 @@ + userName="${activemq_username}"/> diff --git a/src/main/resources/wellSensor/consumer.properties b/src/main/resources/wellSensor/consumer.properties new file mode 100644 index 0000000..1451c84 --- /dev/null +++ b/src/main/resources/wellSensor/consumer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com +group.id = example-group1 +auto.commit.interval.ms = 60000 +sasl.kerberos.service.name = kafka diff --git a/src/main/resources/wellSensor/kafkaSecurityMode b/src/main/resources/wellSensor/kafkaSecurityMode new file mode 100644 index 0000000..ed59a5e --- /dev/null +++ b/src/main/resources/wellSensor/kafkaSecurityMode @@ -0,0 +1 @@ +kafka.client.security.mode = yes diff --git a/src/main/resources/wellSensor/krb5.conf b/src/main/resources/wellSensor/krb5.conf new file mode 100644 index 0000000..003c6c7 --- /dev/null +++ b/src/main/resources/wellSensor/krb5.conf @@ -0,0 +1,48 @@ +[kdcdefaults] +kdc_ports = 192.168.65.19:21732 +kdc_tcp_ports = "" + +[libdefaults] +default_realm = HADOOP.COM +kdc_timeout = 2500 +clockskew = 300 +use_dns_lookup = 0 +udp_preference_limit = 1465 +max_retries = 5 +dns_lookup_kdc = false +dns_lookup_realm = false +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +allow_extend_version = false +default_ccache_name = FILE:/tmp//krb5cc_%{uid} + +[realms] +HADOOP.COM = { +kdc = 192.168.65.19:21732 +kdc = 192.168.65.18:21732 +admin_server = 192.168.65.19:21730 +admin_server = 192.168.65.18:21730 +kpasswd_server = 192.168.65.19:21731 +kpasswd_server = 192.168.65.18:21731 +kpasswd_port = 21731 +kadmind_port = 21730 +kadmind_listen = 192.168.65.19:21730 +kpasswd_listen = 192.168.65.19:21731 +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +acl_file = /opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/kadm5.acl +dict_file = /opt/huawei/Bigdata/common/runtime/security/weakPasswdDic/weakPasswdForKdc.ini +key_stash_file = /opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/.k5.HADOOP.COM +} + +[domain_realm] +.hadoop.com = HADOOP.COM + +[logging] +kdc = SYSLOG:INFO:DAEMON +admin_server = SYSLOG:INFO:DAEMON +default = SYSLOG:NOTICE:DAEMON diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + org.apache.kafka + kafka-clients + ${kafka.version} + + + xml-apis + xml-apis + + + + + + xml-apis + xml-apis + 1.4.01 + + commons-beanutils @@ -209,7 +228,6 @@ 1.4.5 - org.slf4j @@ -262,5 +280,53 @@ spring-jms 3.2.8.RELEASE + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + bigdata + 
http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + \ No newline at end of file diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java index bca4fde..b363729 100644 --- a/src/main/java/org/well/mysql/sink/WellSink.java +++ b/src/main/java/org/well/mysql/sink/WellSink.java @@ -54,8 +54,6 @@ super.start(); LOG.info("--------wellMysqlSink start-------"); System.out.println("--------wellMysqlSink start-------"); - - } @Override @@ -125,7 +123,7 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}"; temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}"; - temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// 
temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// 
temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ 
b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; + } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return 
JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String 
USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String 
SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + 
System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. + */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, 
krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties 
securityProps = new Properties(); + // file does not exist. + if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, 
cell, upTime, devCode, busWell == null ? "" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] 
standardParam = new String[]{"Lon", "lat"}; if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/src/main/java/org/well/well/resp/WaterQualityResponse.java b/src/main/java/org/well/well/resp/WaterQualityResponse.java index 47d3674..87b18ea 100644 --- a/src/main/java/org/well/well/resp/WaterQualityResponse.java +++ b/src/main/java/org/well/well/resp/WaterQualityResponse.java @@ -9,10 +9,15 @@ import org.well.well.base.AbstractResponse; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; +import org.well.well.kafka.Producer; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; import org.well.well.manager.DeviceConfigManager; import org.well.well.manager.ImeiManager; import org.well.well.manager.LiquidManager; import org.well.well.manager.MeterManager; +import org.well.well.util.Configure; import java.util.HashMap; import java.util.Map; @@ -29,24 +34,38 @@ try { ClassPathXmlApplicationContext ac = this.getAc(); MeterManager meterManager = ac.getBean(MeterManager.class); + //直接推送 JSONObject json = JSONObject.fromObject(content); String devCode = json.get("DevID").toString(); + String dataTimeStr = json.get("LogTime").toString(); Map meterMap = new HashMap(); Map meterMapBefore = new HashMap(); - meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]","")); + meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]", "")); meterMap.put("CN", "2011"); + meterMap.put("LogTime", dataTimeStr); meterMap.put("devCode", devCode); -// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); +// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); JSONArray jsonArray = JSONArray.fromObject(json.get("Status")); for (int i = 0; i < jsonArray.size(); i++) { String name = ((JSONObject) jsonArray.get(i)).get("Key").toString(); String value = ((JSONObject) jsonArray.get(i)).get("Value").toString(); - meterMapBefore.put(name,value); + meterMapBefore.put(name, value); } + //包含ProviderData,且值为1,说明来自于外协厂家,直接进行大数据平台的转发 + if (json.containsKey("ProviderData")&&json.get("ProviderData").toString().equals("1")) { + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(content, dataTopic); + } + } + //水质的数据分别从大数据平台和来源第三方外协 + //由于大数据平台直接转发,没有kafkaDataFlag设置,在第三方转发的时候添加的false, + // 所以这里的逻辑和格式 和其他设备不一样 + meterMap.put(DeviceTypeConstant.KAFKA_DATA_FLAG, json.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG) ? 
"0" : "1"); meterMap.put("temp", meterMapBefore.get("Temp")); meterMap.put("PH", meterMapBefore.get("PH")); - meterMap.put("conductivity", meterMapBefore.get("Cond")); + meterMap.put("conductivity", meterMapBefore.get("Cond")); meterMap.put("DO", meterMapBefore.get("DO")); meterMap.put("turbidity", meterMapBefore.get("Turb")); meterMap.put("COD", meterMapBefore.get("COD")); @@ -54,7 +73,7 @@ meterMap.put("TP", meterMapBefore.get("TP")); meterMap.put("TN", meterMapBefore.get("TN")); meterManager.processMeterData(meterMap); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); } } diff --git a/src/main/java/org/well/well/util/PushList.java b/src/main/java/org/well/well/util/PushList.java index 74568fe..ba5e1af 100644 --- a/src/main/java/org/well/well/util/PushList.java +++ b/src/main/java/org/well/well/util/PushList.java @@ -1,85 +1,85 @@ -package org.well.well.util; - -import java.util.ArrayList; -import java.util.List; - -import com.gexin.rp.sdk.base.IPushResult; -import com.gexin.rp.sdk.base.impl.ListMessage; -import com.gexin.rp.sdk.base.impl.Target; -import com.gexin.rp.sdk.http.IGtPush; -import com.gexin.rp.sdk.template.NotificationTemplate; -import com.gexin.rp.sdk.template.style.Style0; - -/** - * Created by test203 on 2019/6/11. - */ - - -public class PushList { - //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; - - private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); - private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); - private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); - - //别名推送方式 - // static String Alias1 = ""; - // static String Alias2 = ""; - static String host = "http://sdk.open.api.igexin.com/apiex.htm"; - - public static void pushToUser(List cids, String title, String content){ - // 配置返回每个用户返回用户状态,可选 - System.setProperty("gexin_pushList_needDetails", "true"); - // 配置返回每个别名及其对应cid的用户状态,可选 - // System.setProperty("gexin_pushList_needAliasDetails", "true"); - IGtPush push = new IGtPush(host, appKey, masterSecret); - // 通知透传模板 - NotificationTemplate template = notificationTemplateDemo(title,content); - ListMessage message = new ListMessage(); - message.setData(template); - // 设置消息离线,并设置离线时间 - message.setOffline(true); - // 离线有效时间,单位为毫秒,可选 - message.setOfflineExpireTime(24 * 1000 * 3600); - // 配置推送目标 - List targets = new ArrayList(); - for (String cid : cids) { - Target target = new Target(); - target.setAppId(appId); - target.setClientId(cid); - targets.add(target); - } - - // taskId用于在推送时去查找对应的message - String taskId = push.getContentId(message); - IPushResult ret = push.pushMessageToList(taskId, targets); - System.out.println(ret.getResponse().toString()); - } - - public static NotificationTemplate notificationTemplateDemo(String title,String content) { - NotificationTemplate template = new NotificationTemplate(); - // 设置APPID与APPKEY - template.setAppId(appId); - template.setAppkey(appKey); - - Style0 style = new Style0(); - // 设置通知栏标题与内容 - style.setTitle(title); - style.setText(content); - // 配置通知栏图标 - style.setLogo("icon.png"); - // 配置通知栏网络图标 - style.setLogoUrl(""); - // 设置通知是否响铃,震动,或者可清除 - style.setRing(true); - style.setVibrate(true); - style.setClearable(true); - template.setStyle(style); - - // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 - template.setTransmissionType(2); - template.setTransmissionContent("请输入您要透传的内容"); - return template; - } -} - +//package org.well.well.util; +// +//import java.util.ArrayList; 
+//import java.util.List; +// +//import com.gexin.rp.sdk.base.IPushResult; +//import com.gexin.rp.sdk.base.impl.ListMessage; +//import com.gexin.rp.sdk.base.impl.Target; +//import com.gexin.rp.sdk.http.IGtPush; +//import com.gexin.rp.sdk.template.NotificationTemplate; +//import com.gexin.rp.sdk.template.style.Style0; +// +///** +// * Created by test203 on 2019/6/11. +// */ +// +// +//public class PushList { +// //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; +// +// private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); +// private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); +// private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); +// +// //别名推送方式 +// // static String Alias1 = ""; +// // static String Alias2 = ""; +// static String host = "http://sdk.open.api.igexin.com/apiex.htm"; +// +// public static void pushToUser(List cids, String title, String content){ +// // 配置返回每个用户返回用户状态,可选 +// System.setProperty("gexin_pushList_needDetails", "true"); +// // 配置返回每个别名及其对应cid的用户状态,可选 +// // System.setProperty("gexin_pushList_needAliasDetails", "true"); +// IGtPush push = new IGtPush(host, appKey, masterSecret); +// // 通知透传模板 +// NotificationTemplate template = notificationTemplateDemo(title,content); +// ListMessage message = new ListMessage(); +// message.setData(template); +// // 设置消息离线,并设置离线时间 +// message.setOffline(true); +// // 离线有效时间,单位为毫秒,可选 +// message.setOfflineExpireTime(24 * 1000 * 3600); +// // 配置推送目标 +// List targets = new ArrayList(); +// for (String cid : cids) { +// Target target = new Target(); +// target.setAppId(appId); +// target.setClientId(cid); +// targets.add(target); +// } +// +// // taskId用于在推送时去查找对应的message +// String taskId = push.getContentId(message); +// IPushResult ret = push.pushMessageToList(taskId, targets); +// System.out.println(ret.getResponse().toString()); +// } +// +// public static NotificationTemplate notificationTemplateDemo(String title,String content) { +// NotificationTemplate template = new NotificationTemplate(); +// // 设置APPID与APPKEY +// template.setAppId(appId); +// template.setAppkey(appKey); +// +// Style0 style = new Style0(); +// // 设置通知栏标题与内容 +// style.setTitle(title); +// style.setText(content); +// // 配置通知栏图标 +// style.setLogo("icon.png"); +// // 配置通知栏网络图标 +// style.setLogoUrl(""); +// // 设置通知是否响铃,震动,或者可清除 +// style.setRing(true); +// style.setVibrate(true); +// style.setClearable(true); +// template.setStyle(style); +// +// // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 +// template.setTransmissionType(2); +// template.setTransmissionContent("请输入您要透传的内容"); +// return template; +// } +//} +// diff --git a/src/main/java/org/well/well/util/ResponseResolver.java b/src/main/java/org/well/well/util/ResponseResolver.java index 6fef231..f73734e 100644 --- a/src/main/java/org/well/well/util/ResponseResolver.java +++ b/src/main/java/org/well/well/util/ResponseResolver.java @@ -20,14 +20,15 @@ static { abstractResponseHashMap.put(DeviceTypeEnum.Liquid.name(), new LiquidResponse()); abstractResponseHashMap.put(DeviceTypeEnum.Well.name(), new WellResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse());//集中器 abstractResponseHashMap.put(DeviceTypeEnum.Methane.name(), new MethaneResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.WasteGas.name(), new WasteGasResponse()); - 
abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse()); -// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse());//温湿度 +// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse());//噪音开挖 + abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse());//井盖定位 + abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse());//水质检测 } diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..4701fab --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,41 @@ +Manifest-Version: 1.0 +Main-Class: org.well.mysql.sink.WellSink +Class-Path: slf4j-log4j12-1.6.1.jar commons-codec-1.9.jar commons-pool2- + 2.4.2.jar javassist-3.18.1-GA.jar jcl-over-slf4j-1.7.7.jar jansi-1.11.j + ar hawtbuf-1.11.jar jackson-databind-2.4.2.jar mina-core-2.0.4.jar juni + t-4.10.jar log4j-1.2.16.jar dom4j-1.6.1.jar hamcrest-core-1.1.jar orika + -core-1.4.5.jar activemq-pool-5.14.5.jar activemq-client-5.14.5.jar spr + ing-beans-4.0.6.RELEASE.jar httpcore-4.2.1.jar jboss-transaction-api_1. + 2_spec-1.0.0.Final.jar commons-io-2.4.jar concurrentlinkedhashmap-lru-1 + .2_jdk5.jar commons-cli-1.2.jar commons-fileupload-1.3.1.jar jsr305-1.3 + .9.jar gson-2.2.2.jar jackson-jaxrs-json-provider-2.4.2.jar jetty-6.1.2 + 6.jar janino-2.7.6.jar commons-beanutils-1.9.2.jar hibernate-core-4.3.6 + .Final.jar guava-10.0.1.jar spring-context-support-4.0.6.RELEASE.jar lo + g4jdbc-remix-0.2.7.jar hibernate-jpa-2.1-api-1.0.0.Final.jar servlet-ap + i-2.5-20110124.jar geronimo-jms_1.1_spec-1.1.1.jar java-sizeof-0.0.4.ja + r aopalliance-1.0.jar jandex-1.1.0.Final.jar jboss-logging-3.1.3.GA.jar + antlr-2.7.7.jar validation-api-1.1.0.Final.jar geronimo-jta_1.0.1B_spe + c-1.0.1.jar jackson-jaxrs-base-2.4.2.jar logback-core-1.1.2.jar hiberna + te-validator-5.1.2.Final.jar logback-classic-1.1.2.jar avro-1.7.3.jar s + pring-tx-4.0.6.RELEASE.jar spring-orm-4.0.6.RELEASE.jar activemq-broker + -5.14.5.jar kafka-clients-2.4.0-hw-ei-312005.jar commons-pool-1.5.4.jar + ezmorph-1.0.6.jar netty-3.4.0.Final.jar avro-ipc-1.7.3.jar flume-ng-co + re-1.4.0.jar activemq-openwire-legacy-5.14.5.jar commons-compiler-2.7.6 + .jar jetty-util-6.1.26.jar xbean-spring-4.2.jar jackson-core-asl-1.9.3. 
+ jar jackson-mapper-asl-1.9.3.jar spring-core-4.0.6.RELEASE.jar geronimo + -j2ee-management_1.1_spec-1.0.1.jar ojdbc6-11.1.0.7.0.jar json-lib-2.4- + jdk15.jar activemq-jms-pool-5.14.5.jar jul-to-slf4j-1.7.7.jar velocity- + 1.7.jar joda-time-2.1.jar commons-httpclient-3.1.jar spring-context-4.0 + .6.RELEASE.jar commons-lang3-3.3.2.jar commons-lang-2.5.jar libthrift-0 + .7.0.jar jackson-core-2.4.2.jar hibernate-commons-annotations-4.0.5.Fin + al.jar commons-logging-1.1.1.jar fastjson-1.1.15.jar mysql-connector-ja + va-5.1.25.jar spring-aop-4.0.6.RELEASE.jar slf4j-api-1.7.7.jar httpclie + nt-4.2.1.jar flume-ng-configuration-1.4.0.jar jboss-logging-annotations + -1.2.0.Beta1.jar snappy-java-1.0.4.1.jar paranamer-2.3.jar flume-ng-sdk + -1.4.0.jar spring-webmvc-4.0.6.RELEASE.jar jackson-annotations-2.4.0.ja + r jackson-module-jaxb-annotations-2.4.2.jar commons-dbcp-1.4.jar spring + -jms-3.2.8.RELEASE.jar spring-expression-4.0.6.RELEASE.jar jstl-1.2.jar + xml-apis-1.0.b2.jar activemq-spring-5.14.5.jar spring-jdbc-4.0.6.RELEA + SE.jar classmate-1.0.0.jar commons-collections-3.2.1.jar spring-web-4.0 + .6.RELEASE.jar + diff --git a/src/main/resources/wellSensor/77042.jaas.conf b/src/main/resources/wellSensor/77042.jaas.conf new file mode 100644 index 0000000..3abd31a --- /dev/null +++ b/src/main/resources/wellSensor/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/wellSensor/application.properties b/src/main/resources/wellSensor/application.properties index 0c2d25c..a8970d1 100644 --- a/src/main/resources/wellSensor/application.properties +++ b/src/main/resources/wellSensor/application.properties @@ -20,9 +20,9 @@ #db.default.username=sensor #db.default.password=sensor db.default.driverClassName=com.mysql.jdbc.Driver -db.default.url=jdbc:mysql://192.168.0.166:3306/smartwell?useUnicode=true&characterEncoding=UTF-8&useSSL=false +db.default.url=jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?useUnicode=true&characterEncoding=UTF-8&useSSL=false db.default.username=root -db.default.password=root +db.default.password=casic203yz2db ### ============================================================================ # dbReal # ============================================================================ @@ -157,8 +157,8 @@ # ============================================================================ # ACTIVEMQ配置 # ============================================================================ -activemq_url = tcp://192.168.0.203:61616 -activemq_username = +activemq_url = tcp://127.0.0.1:61616 +activemq_username ="" activemq_password = @@ -166,6 +166,13 @@ # 告警、工单推送地址 # ============================================================================ #sendURL =http://111.198.10.15:11302/smartwell/job/updateSinkJob -sendURL =http://localhost:14537/job/updateSinkJob +sendURL 
=http://192.168.0.218:80/smartwell/job/updateSinkJob + +# ============================================================================ +# kafka大数据平台 + +bootstrapServer=192.168.65.14:21005,192.168.65.15:21005,192.168.65.16:21005 +alarmTopic=MSGQUEUE_8287 +dataTopic=TEMPSTORE_8204 diff --git a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml index a11375d..4b1a083 100644 --- a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml +++ b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml @@ -16,8 +16,7 @@ + userName="${activemq_username}"/> diff --git a/src/main/resources/wellSensor/consumer.properties b/src/main/resources/wellSensor/consumer.properties new file mode 100644 index 0000000..1451c84 --- /dev/null +++ b/src/main/resources/wellSensor/consumer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com +group.id = example-group1 +auto.commit.interval.ms = 60000 +sasl.kerberos.service.name = kafka diff --git a/src/main/resources/wellSensor/kafkaSecurityMode b/src/main/resources/wellSensor/kafkaSecurityMode new file mode 100644 index 0000000..ed59a5e --- /dev/null +++ b/src/main/resources/wellSensor/kafkaSecurityMode @@ -0,0 +1 @@ +kafka.client.security.mode = yes diff --git a/src/main/resources/wellSensor/krb5.conf b/src/main/resources/wellSensor/krb5.conf new file mode 100644 index 0000000..003c6c7 --- /dev/null +++ b/src/main/resources/wellSensor/krb5.conf @@ -0,0 +1,48 @@ +[kdcdefaults] +kdc_ports = 192.168.65.19:21732 +kdc_tcp_ports = "" + +[libdefaults] +default_realm = HADOOP.COM +kdc_timeout = 2500 +clockskew = 300 +use_dns_lookup = 0 +udp_preference_limit = 1465 +max_retries = 5 +dns_lookup_kdc = false +dns_lookup_realm = false +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +allow_extend_version = false +default_ccache_name = FILE:/tmp//krb5cc_%{uid} + +[realms] +HADOOP.COM = { +kdc = 192.168.65.19:21732 +kdc = 192.168.65.18:21732 +admin_server = 192.168.65.19:21730 +admin_server = 192.168.65.18:21730 +kpasswd_server = 192.168.65.19:21731 +kpasswd_server = 192.168.65.18:21731 +kpasswd_port = 21731 +kadmind_port = 21730 +kadmind_listen = 192.168.65.19:21730 +kpasswd_listen = 192.168.65.19:21731 +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +acl_file = /opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/kadm5.acl +dict_file = /opt/huawei/Bigdata/common/runtime/security/weakPasswdDic/weakPasswdForKdc.ini +key_stash_file = /opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/.k5.HADOOP.COM +} + +[domain_realm] +.hadoop.com = HADOOP.COM + +[logging] +kdc = SYSLOG:INFO:DAEMON +admin_server = SYSLOG:INFO:DAEMON +default = SYSLOG:NOTICE:DAEMON diff --git a/src/main/resources/wellSensor/log4j.properties b/src/main/resources/wellSensor/log4j.properties new file mode 100644 index 0000000..10e767d --- /dev/null +++ b/src/main/resources/wellSensor/log4j.properties @@ -0,0 +1,37 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +kafka.logs.dir=logs + +log4j.rootLogger=INFO, stdout + +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.logger.kafka=ERROR, kafkaAppender + +log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.kafkaAppender.File=${kafka.logs.dir}/client.log +log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +# Turn on all our debugging info +#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender +#log4j.logger.kafka.client.ClientUtils=DEBUG, kafkaAppender +#log4j.logger.kafka.perf=DEBUG, kafkaAppender +#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender +#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + org.apache.kafka + kafka-clients + ${kafka.version} + + + xml-apis + xml-apis + + + + + + xml-apis + xml-apis + 1.4.01 + + commons-beanutils @@ -209,7 +228,6 @@ 1.4.5 - org.slf4j @@ -262,5 +280,53 @@ spring-jms 3.2.8.RELEASE + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + \ No newline at end of file diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java index bca4fde..b363729 100644 --- a/src/main/java/org/well/mysql/sink/WellSink.java +++ b/src/main/java/org/well/mysql/sink/WellSink.java @@ -54,8 +54,6 @@ super.start(); LOG.info("--------wellMysqlSink start-------"); System.out.println("--------wellMysqlSink start-------"); - - } @Override @@ -125,7 +123,7 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}"; temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}"; - temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; +// 
temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// 
temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// 
temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; 
+ } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new 
HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + 
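+                // NOTE: conFile above is built from "producer.properties", while the line below loads
+                // "consumer.properties"; presumably new File(filePath + "consumer.properties") was intended
+                // so that the existence check matches the file actually loaded.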
consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = 
"user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + 
this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. + */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new 
IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ?
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/src/main/java/org/well/well/resp/WaterQualityResponse.java b/src/main/java/org/well/well/resp/WaterQualityResponse.java index 47d3674..87b18ea 100644 --- a/src/main/java/org/well/well/resp/WaterQualityResponse.java +++ b/src/main/java/org/well/well/resp/WaterQualityResponse.java @@ -9,10 +9,15 @@ import org.well.well.base.AbstractResponse; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; +import org.well.well.kafka.Producer; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; import org.well.well.manager.DeviceConfigManager; import org.well.well.manager.ImeiManager; import org.well.well.manager.LiquidManager; import org.well.well.manager.MeterManager; +import org.well.well.util.Configure; import java.util.HashMap; import java.util.Map; @@ -29,24 +34,38 @@ try { ClassPathXmlApplicationContext ac = this.getAc(); MeterManager meterManager = ac.getBean(MeterManager.class); + //直接推送 JSONObject json = JSONObject.fromObject(content); String devCode = json.get("DevID").toString(); + String dataTimeStr = json.get("LogTime").toString(); Map meterMap = new HashMap(); Map meterMapBefore = new HashMap(); - meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]","")); + meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]", "")); meterMap.put("CN", "2011"); + meterMap.put("LogTime", dataTimeStr); meterMap.put("devCode", devCode); -// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); +// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); JSONArray jsonArray = JSONArray.fromObject(json.get("Status")); for (int i = 0; i < jsonArray.size(); i++) { String name = ((JSONObject) jsonArray.get(i)).get("Key").toString(); String value = ((JSONObject) jsonArray.get(i)).get("Value").toString(); - meterMapBefore.put(name,value); + meterMapBefore.put(name, value); } + //包含ProviderData,且值为1,说明来自于外协厂家,直接进行大数据平台的转发 + if (json.containsKey("ProviderData")&&json.get("ProviderData").toString().equals("1")) { + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(content, dataTopic); + } + } + //水质的数据分别从大数据平台和来源第三方外协 + //由于大数据平台直接转发,没有kafkaDataFlag设置,在第三方转发的时候添加的false, + // 所以这里的逻辑和格式 和其他设备不一样 + meterMap.put(DeviceTypeConstant.KAFKA_DATA_FLAG, json.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG) ? 
"0" : "1"); meterMap.put("temp", meterMapBefore.get("Temp")); meterMap.put("PH", meterMapBefore.get("PH")); - meterMap.put("conductivity", meterMapBefore.get("Cond")); + meterMap.put("conductivity", meterMapBefore.get("Cond")); meterMap.put("DO", meterMapBefore.get("DO")); meterMap.put("turbidity", meterMapBefore.get("Turb")); meterMap.put("COD", meterMapBefore.get("COD")); @@ -54,7 +73,7 @@ meterMap.put("TP", meterMapBefore.get("TP")); meterMap.put("TN", meterMapBefore.get("TN")); meterManager.processMeterData(meterMap); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); } } diff --git a/src/main/java/org/well/well/util/PushList.java b/src/main/java/org/well/well/util/PushList.java index 74568fe..ba5e1af 100644 --- a/src/main/java/org/well/well/util/PushList.java +++ b/src/main/java/org/well/well/util/PushList.java @@ -1,85 +1,85 @@ -package org.well.well.util; - -import java.util.ArrayList; -import java.util.List; - -import com.gexin.rp.sdk.base.IPushResult; -import com.gexin.rp.sdk.base.impl.ListMessage; -import com.gexin.rp.sdk.base.impl.Target; -import com.gexin.rp.sdk.http.IGtPush; -import com.gexin.rp.sdk.template.NotificationTemplate; -import com.gexin.rp.sdk.template.style.Style0; - -/** - * Created by test203 on 2019/6/11. - */ - - -public class PushList { - //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; - - private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); - private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); - private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); - - //别名推送方式 - // static String Alias1 = ""; - // static String Alias2 = ""; - static String host = "http://sdk.open.api.igexin.com/apiex.htm"; - - public static void pushToUser(List cids, String title, String content){ - // 配置返回每个用户返回用户状态,可选 - System.setProperty("gexin_pushList_needDetails", "true"); - // 配置返回每个别名及其对应cid的用户状态,可选 - // System.setProperty("gexin_pushList_needAliasDetails", "true"); - IGtPush push = new IGtPush(host, appKey, masterSecret); - // 通知透传模板 - NotificationTemplate template = notificationTemplateDemo(title,content); - ListMessage message = new ListMessage(); - message.setData(template); - // 设置消息离线,并设置离线时间 - message.setOffline(true); - // 离线有效时间,单位为毫秒,可选 - message.setOfflineExpireTime(24 * 1000 * 3600); - // 配置推送目标 - List targets = new ArrayList(); - for (String cid : cids) { - Target target = new Target(); - target.setAppId(appId); - target.setClientId(cid); - targets.add(target); - } - - // taskId用于在推送时去查找对应的message - String taskId = push.getContentId(message); - IPushResult ret = push.pushMessageToList(taskId, targets); - System.out.println(ret.getResponse().toString()); - } - - public static NotificationTemplate notificationTemplateDemo(String title,String content) { - NotificationTemplate template = new NotificationTemplate(); - // 设置APPID与APPKEY - template.setAppId(appId); - template.setAppkey(appKey); - - Style0 style = new Style0(); - // 设置通知栏标题与内容 - style.setTitle(title); - style.setText(content); - // 配置通知栏图标 - style.setLogo("icon.png"); - // 配置通知栏网络图标 - style.setLogoUrl(""); - // 设置通知是否响铃,震动,或者可清除 - style.setRing(true); - style.setVibrate(true); - style.setClearable(true); - template.setStyle(style); - - // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 - template.setTransmissionType(2); - template.setTransmissionContent("请输入您要透传的内容"); - return template; - } -} - +//package org.well.well.util; +// +//import java.util.ArrayList; 
+//import java.util.List; +// +//import com.gexin.rp.sdk.base.IPushResult; +//import com.gexin.rp.sdk.base.impl.ListMessage; +//import com.gexin.rp.sdk.base.impl.Target; +//import com.gexin.rp.sdk.http.IGtPush; +//import com.gexin.rp.sdk.template.NotificationTemplate; +//import com.gexin.rp.sdk.template.style.Style0; +// +///** +// * Created by test203 on 2019/6/11. +// */ +// +// +//public class PushList { +// //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; +// +// private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); +// private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); +// private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); +// +// //别名推送方式 +// // static String Alias1 = ""; +// // static String Alias2 = ""; +// static String host = "http://sdk.open.api.igexin.com/apiex.htm"; +// +// public static void pushToUser(List cids, String title, String content){ +// // 配置返回每个用户返回用户状态,可选 +// System.setProperty("gexin_pushList_needDetails", "true"); +// // 配置返回每个别名及其对应cid的用户状态,可选 +// // System.setProperty("gexin_pushList_needAliasDetails", "true"); +// IGtPush push = new IGtPush(host, appKey, masterSecret); +// // 通知透传模板 +// NotificationTemplate template = notificationTemplateDemo(title,content); +// ListMessage message = new ListMessage(); +// message.setData(template); +// // 设置消息离线,并设置离线时间 +// message.setOffline(true); +// // 离线有效时间,单位为毫秒,可选 +// message.setOfflineExpireTime(24 * 1000 * 3600); +// // 配置推送目标 +// List targets = new ArrayList(); +// for (String cid : cids) { +// Target target = new Target(); +// target.setAppId(appId); +// target.setClientId(cid); +// targets.add(target); +// } +// +// // taskId用于在推送时去查找对应的message +// String taskId = push.getContentId(message); +// IPushResult ret = push.pushMessageToList(taskId, targets); +// System.out.println(ret.getResponse().toString()); +// } +// +// public static NotificationTemplate notificationTemplateDemo(String title,String content) { +// NotificationTemplate template = new NotificationTemplate(); +// // 设置APPID与APPKEY +// template.setAppId(appId); +// template.setAppkey(appKey); +// +// Style0 style = new Style0(); +// // 设置通知栏标题与内容 +// style.setTitle(title); +// style.setText(content); +// // 配置通知栏图标 +// style.setLogo("icon.png"); +// // 配置通知栏网络图标 +// style.setLogoUrl(""); +// // 设置通知是否响铃,震动,或者可清除 +// style.setRing(true); +// style.setVibrate(true); +// style.setClearable(true); +// template.setStyle(style); +// +// // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 +// template.setTransmissionType(2); +// template.setTransmissionContent("请输入您要透传的内容"); +// return template; +// } +//} +// diff --git a/src/main/java/org/well/well/util/ResponseResolver.java b/src/main/java/org/well/well/util/ResponseResolver.java index 6fef231..f73734e 100644 --- a/src/main/java/org/well/well/util/ResponseResolver.java +++ b/src/main/java/org/well/well/util/ResponseResolver.java @@ -20,14 +20,15 @@ static { abstractResponseHashMap.put(DeviceTypeEnum.Liquid.name(), new LiquidResponse()); abstractResponseHashMap.put(DeviceTypeEnum.Well.name(), new WellResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse());//集中器 abstractResponseHashMap.put(DeviceTypeEnum.Methane.name(), new MethaneResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.WasteGas.name(), new WasteGasResponse()); - 
abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse()); -// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse()); - abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse());//温湿度 +// abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse()); + abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse());//噪音开挖 + abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse());//井盖定位 + abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse());//水质检测 } diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..4701fab --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,41 @@ +Manifest-Version: 1.0 +Main-Class: org.well.mysql.sink.WellSink +Class-Path: slf4j-log4j12-1.6.1.jar commons-codec-1.9.jar commons-pool2- + 2.4.2.jar javassist-3.18.1-GA.jar jcl-over-slf4j-1.7.7.jar jansi-1.11.j + ar hawtbuf-1.11.jar jackson-databind-2.4.2.jar mina-core-2.0.4.jar juni + t-4.10.jar log4j-1.2.16.jar dom4j-1.6.1.jar hamcrest-core-1.1.jar orika + -core-1.4.5.jar activemq-pool-5.14.5.jar activemq-client-5.14.5.jar spr + ing-beans-4.0.6.RELEASE.jar httpcore-4.2.1.jar jboss-transaction-api_1. + 2_spec-1.0.0.Final.jar commons-io-2.4.jar concurrentlinkedhashmap-lru-1 + .2_jdk5.jar commons-cli-1.2.jar commons-fileupload-1.3.1.jar jsr305-1.3 + .9.jar gson-2.2.2.jar jackson-jaxrs-json-provider-2.4.2.jar jetty-6.1.2 + 6.jar janino-2.7.6.jar commons-beanutils-1.9.2.jar hibernate-core-4.3.6 + .Final.jar guava-10.0.1.jar spring-context-support-4.0.6.RELEASE.jar lo + g4jdbc-remix-0.2.7.jar hibernate-jpa-2.1-api-1.0.0.Final.jar servlet-ap + i-2.5-20110124.jar geronimo-jms_1.1_spec-1.1.1.jar java-sizeof-0.0.4.ja + r aopalliance-1.0.jar jandex-1.1.0.Final.jar jboss-logging-3.1.3.GA.jar + antlr-2.7.7.jar validation-api-1.1.0.Final.jar geronimo-jta_1.0.1B_spe + c-1.0.1.jar jackson-jaxrs-base-2.4.2.jar logback-core-1.1.2.jar hiberna + te-validator-5.1.2.Final.jar logback-classic-1.1.2.jar avro-1.7.3.jar s + pring-tx-4.0.6.RELEASE.jar spring-orm-4.0.6.RELEASE.jar activemq-broker + -5.14.5.jar kafka-clients-2.4.0-hw-ei-312005.jar commons-pool-1.5.4.jar + ezmorph-1.0.6.jar netty-3.4.0.Final.jar avro-ipc-1.7.3.jar flume-ng-co + re-1.4.0.jar activemq-openwire-legacy-5.14.5.jar commons-compiler-2.7.6 + .jar jetty-util-6.1.26.jar xbean-spring-4.2.jar jackson-core-asl-1.9.3. 
+ jar jackson-mapper-asl-1.9.3.jar spring-core-4.0.6.RELEASE.jar geronimo + -j2ee-management_1.1_spec-1.0.1.jar ojdbc6-11.1.0.7.0.jar json-lib-2.4- + jdk15.jar activemq-jms-pool-5.14.5.jar jul-to-slf4j-1.7.7.jar velocity- + 1.7.jar joda-time-2.1.jar commons-httpclient-3.1.jar spring-context-4.0 + .6.RELEASE.jar commons-lang3-3.3.2.jar commons-lang-2.5.jar libthrift-0 + .7.0.jar jackson-core-2.4.2.jar hibernate-commons-annotations-4.0.5.Fin + al.jar commons-logging-1.1.1.jar fastjson-1.1.15.jar mysql-connector-ja + va-5.1.25.jar spring-aop-4.0.6.RELEASE.jar slf4j-api-1.7.7.jar httpclie + nt-4.2.1.jar flume-ng-configuration-1.4.0.jar jboss-logging-annotations + -1.2.0.Beta1.jar snappy-java-1.0.4.1.jar paranamer-2.3.jar flume-ng-sdk + -1.4.0.jar spring-webmvc-4.0.6.RELEASE.jar jackson-annotations-2.4.0.ja + r jackson-module-jaxb-annotations-2.4.2.jar commons-dbcp-1.4.jar spring + -jms-3.2.8.RELEASE.jar spring-expression-4.0.6.RELEASE.jar jstl-1.2.jar + xml-apis-1.0.b2.jar activemq-spring-5.14.5.jar spring-jdbc-4.0.6.RELEA + SE.jar classmate-1.0.0.jar commons-collections-3.2.1.jar spring-web-4.0 + .6.RELEASE.jar + diff --git a/src/main/resources/wellSensor/77042.jaas.conf b/src/main/resources/wellSensor/77042.jaas.conf new file mode 100644 index 0000000..3abd31a --- /dev/null +++ b/src/main/resources/wellSensor/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/wellSensor/application.properties b/src/main/resources/wellSensor/application.properties index 0c2d25c..a8970d1 100644 --- a/src/main/resources/wellSensor/application.properties +++ b/src/main/resources/wellSensor/application.properties @@ -20,9 +20,9 @@ #db.default.username=sensor #db.default.password=sensor db.default.driverClassName=com.mysql.jdbc.Driver -db.default.url=jdbc:mysql://192.168.0.166:3306/smartwell?useUnicode=true&characterEncoding=UTF-8&useSSL=false +db.default.url=jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?useUnicode=true&characterEncoding=UTF-8&useSSL=false db.default.username=root -db.default.password=root +db.default.password=casic203yz2db ### ============================================================================ # dbReal # ============================================================================ @@ -157,8 +157,8 @@ # ============================================================================ # ACTIVEMQ配置 # ============================================================================ -activemq_url = tcp://192.168.0.203:61616 -activemq_username = +activemq_url = tcp://127.0.0.1:61616 +activemq_username ="" activemq_password = @@ -166,6 +166,13 @@ # 告警、工单推送地址 # ============================================================================ #sendURL =http://111.198.10.15:11302/smartwell/job/updateSinkJob -sendURL =http://localhost:14537/job/updateSinkJob +sendURL 
=http://192.168.0.218:80/smartwell/job/updateSinkJob + +# ============================================================================ +# kafka大数据平台 + +bootstrapServer=192.168.65.14:21005,192.168.65.15:21005,192.168.65.16:21005 +alarmTopic=MSGQUEUE_8287 +dataTopic=TEMPSTORE_8204 diff --git a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml index a11375d..4b1a083 100644 --- a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml +++ b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml @@ -16,8 +16,7 @@ + userName="${activemq_username}"/> diff --git a/src/main/resources/wellSensor/consumer.properties b/src/main/resources/wellSensor/consumer.properties new file mode 100644 index 0000000..1451c84 --- /dev/null +++ b/src/main/resources/wellSensor/consumer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com +group.id = example-group1 +auto.commit.interval.ms = 60000 +sasl.kerberos.service.name = kafka diff --git a/src/main/resources/wellSensor/kafkaSecurityMode b/src/main/resources/wellSensor/kafkaSecurityMode new file mode 100644 index 0000000..ed59a5e --- /dev/null +++ b/src/main/resources/wellSensor/kafkaSecurityMode @@ -0,0 +1 @@ +kafka.client.security.mode = yes diff --git a/src/main/resources/wellSensor/krb5.conf b/src/main/resources/wellSensor/krb5.conf new file mode 100644 index 0000000..003c6c7 --- /dev/null +++ b/src/main/resources/wellSensor/krb5.conf @@ -0,0 +1,48 @@ +[kdcdefaults] +kdc_ports = 192.168.65.19:21732 +kdc_tcp_ports = "" + +[libdefaults] +default_realm = HADOOP.COM +kdc_timeout = 2500 +clockskew = 300 +use_dns_lookup = 0 +udp_preference_limit = 1465 +max_retries = 5 +dns_lookup_kdc = false +dns_lookup_realm = false +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +allow_extend_version = false +default_ccache_name = FILE:/tmp//krb5cc_%{uid} + +[realms] +HADOOP.COM = { +kdc = 192.168.65.19:21732 +kdc = 192.168.65.18:21732 +admin_server = 192.168.65.19:21730 +admin_server = 192.168.65.18:21730 +kpasswd_server = 192.168.65.19:21731 +kpasswd_server = 192.168.65.18:21731 +kpasswd_port = 21731 +kadmind_port = 21730 +kadmind_listen = 192.168.65.19:21730 +kpasswd_listen = 192.168.65.19:21731 +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +acl_file = /opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/kadm5.acl +dict_file = /opt/huawei/Bigdata/common/runtime/security/weakPasswdDic/weakPasswdForKdc.ini +key_stash_file = /opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/.k5.HADOOP.COM +} + +[domain_realm] +.hadoop.com = HADOOP.COM + +[logging] +kdc = SYSLOG:INFO:DAEMON +admin_server = SYSLOG:INFO:DAEMON +default = SYSLOG:NOTICE:DAEMON diff --git a/src/main/resources/wellSensor/log4j.properties b/src/main/resources/wellSensor/log4j.properties new file mode 100644 index 0000000..10e767d --- /dev/null +++ b/src/main/resources/wellSensor/log4j.properties @@ -0,0 +1,37 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +kafka.logs.dir=logs + +log4j.rootLogger=INFO, stdout + +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.logger.kafka=ERROR, kafkaAppender + +log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.kafkaAppender.File=${kafka.logs.dir}/client.log +log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +# Turn on all our debugging info +#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender +#log4j.logger.kafka.client.ClientUtils=DEBUG, kafkaAppender +#log4j.logger.kafka.perf=DEBUG, kafkaAppender +#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender +#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG diff --git a/src/main/resources/wellSensor/producer.properties b/src/main/resources/wellSensor/producer.properties new file mode 100644 index 0000000..5e6446a --- /dev/null +++ b/src/main/resources/wellSensor/producer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com +acks = 1 +bootstrap.servers = 192.168.65.16:21007,192.168.65.15:21007,192.168.65.14:21007 +sasl.kerberos.service.name = kafka diff --git a/pom.xml b/pom.xml index a806efa..3a7d7a4 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ http://maven.apache.org UTF-8 + 2.4.0-hw-ei-312005 @@ -163,6 +164,24 @@ 0.2.7 + + org.apache.kafka + kafka-clients + ${kafka.version} + + + xml-apis + xml-apis + + + + + + xml-apis + xml-apis + 1.4.01 + + commons-beanutils @@ -209,7 +228,6 @@ 1.4.5 - org.slf4j @@ -262,5 +280,53 @@ spring-jms 3.2.8.RELEASE + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + \ No newline at end of file diff --git a/src/main/java/org/well/mysql/sink/WellSink.java b/src/main/java/org/well/mysql/sink/WellSink.java index bca4fde..b363729 100644 --- a/src/main/java/org/well/mysql/sink/WellSink.java +++ b/src/main/java/org/well/mysql/sink/WellSink.java @@ -54,8 +54,6 @@ super.start(); LOG.info("--------wellMysqlSink start-------"); System.out.println("--------wellMysqlSink start-------"); - - } @Override @@ -125,7 +123,7 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Concentrator\",\"devCode\":\"00003\",\"mBody\":{\"logTime\":\"20190605002024\",\"bType\":\"ConcentratorOnline\"},\"ts\":1559665224343}"; 
temp="{\"mType\":\"Data\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"datas\":[{\"value\":\"00\",\"uptime\":\"20190605000000\"}],\"logTime\":\"201906010003002\",\"bType\":\"WellData\"},\"ts\":1559665802828}"; - temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"cell\":9.9,\"datas\":[{\"level\":1.1892,\"uptime\":\"20191213000000\"}],\"logTime\":\"20190502000125\",\"bType\":\"LiquidData\"},\"ts\":1556726485336}"; // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidPressureError\"],\"logTime\":\"20190510134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; // temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019010212\",\"mBody\":{\"eventType\":[\"WellLowBatteryAlarm\"],\"logTime\":\"20190624114710\",\"bType\":\"WellEvent\"},\"ts\":1560484030810}"; // temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"71201900001\",\"mBody\":{\"datas\":[{\"longitude\":126.243324343,\"latitude\":39.2546546546,\"uptime\":\"20190809140900\"}],\"logTime\":\"20190809141012\",\"bType\":\"LocatorData\"},\"ts\":1565331012453}"; @@ -135,19 +133,19 @@ // temp="{\"mType\":\"Event\",\"devType\":\"Liquid\",\"devCode\":\"11201900001\",\"mBody\":{\"eventType\":[\"LiquidUltrasonicError\"],\"logTime\":\"20190710134635\",\"bType\":\"LiquidEvent\"},\"ts\":1557467195358}"; -// temp="{\"mType\":\"Event\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; +// temp="{\"mType\":\"Data\",\"devType\":\"TempHumi\",\"devCode\":\"51201900001\",\"mBody\":{\"eventType\":[\"TemperatureFail\",\"TemperatureError\",\"HumidityFail\",\"HumidityError\"],\"logTime\":\"20191021140118\",\"bType\":\"TempHumiEvent\"},\"ts\":1571292078959}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"NoiseDig\",\"devCode\":\"88888888881\",\"mBody\":{\"bType\":\"NoiseDigConfigSuccess\"},\"ts\":1556182310514}"; // temp="{\"mType\":\"Data\",\"devType\":\"WasteGas\",\"devCode\":\"13131313132\",\"mBody\":{\"bType\":\"WasteGasData\",\"datas\":[{\"CO\":80,\"O2\":18,\"H2S\":10,\"CH4\":5,\"liquidSwitch\":true,\"uptime\":\"20191220165124\"}],\"logTime\":\"20191220165124\"},\"ts\":1556182310514}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"13131313135\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":98,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"level\":6.73,\"uptime\":\"20191217161000\"},{\"level\":5.72,\"uptime\":\"20191217162000\"},{\"level\":7.71,\"uptime\":\"20200211103000\"}],\"logTime\":\"20200211103000\"},\"ts\":1556186030842}"; // temp="{\"mType\":\"SetResponse\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidConfigSuccess\"},\"ts\":1556182310514}"; // 
temp="{\"mType\":\"Data\",\"devType\":\"Liquid\",\"devCode\":\"12121212125\",\"mBody\":{\"bType\":\"LiquidData\",\"cell\":97,\"datas\":[{\"level\":4,\"uptime\":\"20191219150000\"},{\"level\":4,\"uptime\":\"20191219151000\"},{\"level\":6.5,\"uptime\":\"20191219152000\"}],\"logTime\":\"20191219152000\"},\"ts\":1556186030842}"; -// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; - temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; +// temp="{\"mType\":\"Data\",\"devType\":\"NoiseDig\",\"devCode\":\"14141414146\",\"mBody\":{\"bType\":\"NoiseDigData\",\"cell\":88,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"noiseVal\":60,\"noiseFreq\":50,\"uptime\":\"20200109123131\"}],\"logTime\":\"20200119123131\"},\"ts\":1556184691451}"; + temp="{\"mType\":\"Event\",\"devType\":\"Well\",\"devCode\":\"412019122104\",\"mBody\":{\"eventType\":[\"WellOpenAlarm\"],\"logTime\":\"20191017154056\",\"bType\":\"WellEvent\"},\"ts\":1571298056241}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Methane\",\"devCode\":\"312020011111\",\"mBody\":{\"cell\":95,\"pci\":100,\"rsrp\":50,\"snr\":20,\"datas\":[{\"gas\":0.064453125,\"uptime\":\"20200407085400\"},{\"gas\":0.080566406,\"uptime\":\"20200407085400\"},{\"gas\":25,\"uptime\":\"20200407085400\"},{\"gas\":0.09990235,\"uptime\":\"20200407085400\"},{\"gas\":0.070898436,\"uptime\":\"20200407085400\"},{\"gas\":60,\"uptime\":\"20200408085400\"}],\"logTime\":\"20200407085400\",\"bType\":\"MethaneData\"},\"ts\":1571292084960}"; -// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; +// temp="{\"mType\":\"Data\",\"devType\":\"Locator\",\"devCode\":\"642019010387\",\"mBody\":{\"datas\":[{\"longitude\":0.0,\"latitude\":0.0,\"uptime\":\"20200118111000\"}],\"logTime\":\"20200118111006\",\"bType\":\"LocatorData\"},\"ts\":1579317006078}"; -// temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; +// 
temp="{\"Status\":\"[{\\\"Value\\\":7.8876,\\\"Key\\\":\\\"PH\\\"},{\\\"Value\\\":28.0265,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":0.1994,\\\"Key\\\":\\\"Turb\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"Cond\\\"},{\\\"Value\\\":0.5252,\\\"Key\\\":\\\"DO\\\"},{\\\"Value\\\":0,\\\"Key\\\":\\\"COD\\\"},{\\\"Value\\\":2746.4216,\\\"Key\\\":\\\"AN\\\"},{\\\"Value\\\":1.0002,\\\"Key\\\":\\\"TP\\\"},{\\\"Value\\\":1.4385,\\\"Key\\\":\\\"TN\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"devType\":\"WaterQuality\",\"LogTime\":\"2020-06-30 16:21:36\",\"DevID\":\"W1L30Z\",\"Provider\":\"KaiNa\"}"; AbstractResponse resp = ResponseResolver.makeResponse(temp); resp.setAc(ac); resp.process(temp); diff --git a/src/main/java/org/well/well/kafka/Producer.java b/src/main/java/org/well/well/kafka/Producer.java new file mode 100644 index 0000000..bd0cef4 --- /dev/null +++ b/src/main/java/org/well/well/kafka/Producer.java @@ -0,0 +1,44 @@ +package org.well.well.kafka; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.well.well.kafka.util.KafkaProperties; +import org.well.well.kafka.util.KafkaUtils; +import org.well.well.kafka.util.LoginUtil; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private static final Logger LOG = LoggerFactory.getLogger(Producer.class); + private static KafkaProducer producer; + + public static void send(String content, String topic) throws IOException { + + LOG.debug("producer start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 +// LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); +// System.out.println("-----"+props.toString()); + // 发布 + producer = new KafkaProducer(props); + ProducerRecord record = new ProducerRecord(topic, "", content); + try { + LOG.info("主题为"+ topic +"kafka发送数据内容-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + } + producer.close(); + } +} diff --git a/src/main/java/org/well/well/kafka/StandardAlarm.java b/src/main/java/org/well/well/kafka/StandardAlarm.java new file mode 100644 index 0000000..d12cfc7 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardAlarm.java @@ -0,0 +1,66 @@ +package org.well.well.kafka; + +public class StandardAlarm { + + private String DevID; + + private String DevType; + + private String Provider; + + private String Data; + + private String LogTime; + + public StandardAlarm(String DevID, String DevType, String Data, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Data = Data; + this.LogTime = LogTime; + } + + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getData() { + return Data; 
+ } + + public void setData(String data) { + Data = data; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } + + +} + diff --git a/src/main/java/org/well/well/kafka/StandardData.java b/src/main/java/org/well/well/kafka/StandardData.java new file mode 100644 index 0000000..9f33d81 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardData.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/org/well/well/kafka/StandardDataUtils.java b/src/main/java/org/well/well/kafka/StandardDataUtils.java new file mode 100644 index 0000000..dba65a8 --- /dev/null +++ b/src/main/java/org/well/well/kafka/StandardDataUtils.java @@ -0,0 +1,60 @@ +package org.well.well.kafka; + +import com.alibaba.fastjson.JSON; +import net.sf.json.JSONObject; +import org.springframework.util.CollectionUtils; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + + public static String standardStatusFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { + List> standardStatusFomateList = new ArrayList<>(); + if (realParam != null) { + for (int i = 0; i < realParam.length; i++) { + Map standardStatusMap = new HashMap<>(); + standardStatusMap.put("Key", standardkeyParm[i]); + standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); + standardStatusFomateList.add(standardStatusMap); + } + } + if (!CollectionUtils.isEmpty(appenList)) { + standardStatusFomateList.addAll(appenList); + } + return JSON.toJSONString(standardStatusFomateList); + } + + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + + +// public static String standardAlarmDataFomate(JSONObject jsonObject, String[] realParam, String[] standardkeyParm, List> appenList) { +// List> standardStatusFomateList = new ArrayList<>(); +// for (int i = 0; i < realParam.length; i++) { +// Map standardStatusMap = new 
HashMap<>(); +// standardStatusMap.put("Key", standardkeyParm[i]); +// standardStatusMap.put("Value", jsonObject.get(realParam[i]).toString()); +// standardStatusFomateList.add(standardStatusMap); +// } +// if (!CollectionUtils.isEmpty(appenList)) { +// standardStatusFomateList.addAll(appenList); +// } +// return JSON.toJSONString(standardStatusFomateList); +// } + +} diff --git a/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java new file mode 100644 index 0000000..ec1912b --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/DeviceTypeConstant.java @@ -0,0 +1,17 @@ +package org.well.well.kafka.constant; + +public class DeviceTypeConstant { + + public static final String LIQUID="LiquidLevel"; + public static final String LOCATION="Location"; + public static final String METHANE="Gas"; + + public static final String WASTE_GAS="HarmfulGas"; + public static final String TEMP_HUMI="AirTempHumi"; + public static final String WELL="ManholeCover"; + public static final String NOISE_DIG="DigNoise"; + public static final String WATER_QUALITY="WaterQuality"; + + public final static String KAFKA_DATA_FLAG = "kafkaDataFlag"; + +} diff --git a/src/main/java/org/well/well/kafka/constant/TopicConstant.java b/src/main/java/org/well/well/kafka/constant/TopicConstant.java new file mode 100644 index 0000000..50e807d --- /dev/null +++ b/src/main/java/org/well/well/kafka/constant/TopicConstant.java @@ -0,0 +1,8 @@ +package org.well.well.kafka.constant; + +public class TopicConstant { + + public static final String DATA_TOPIC="dataTopic"; + public static final String ALARM_TOPIC="alarmTopic"; + +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaProperties.java b/src/main/java/org/well/well/kafka/util/KafkaProperties.java new file mode 100644 index 0000000..97e24cb --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaProperties.java @@ -0,0 +1,132 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties { + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String DATA_TOPIC = "TEMPSTORE_8204"; + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + private KafkaProperties() { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + + + try { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) { + 
consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } catch (IOException e) { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() { + if (null == instance) { + instance = new KafkaProperties(); + } + + return instance; + } + + /** + * 获取参数值 + * + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) { + String rtValue = null; + + if (null == key) { + LOG.error("key is null"); + } else { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * + * @param key + * @return + */ + private String getPropertiesValue(String key) { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/org/well/well/kafka/util/KafkaUtils.java b/src/main/java/org/well/well/kafka/util/KafkaUtils.java new file mode 100644 index 0000000..2951fe3 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/KafkaUtils.java @@ -0,0 +1,136 @@ +package org.well.well.kafka.util; + + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = 
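KafkaProperties is a lazy singleton that loads the producer/consumer/server/client .properties files from the configured directory; getValues(key, default) then falls through those property sets in a fixed order. A minimal lookup sketch (the key and default value here are just examples):

import org.well.well.kafka.util.KafkaProperties;

public class KafkaPropertiesSketch {
    public static void main(String[] args) {
        KafkaProperties kafkaProps = KafkaProperties.getInstance();
        // Checked in order: server.properties, producer.properties, consumer.properties,
        // client.properties; the supplied default is returned if the key is found in none of them
        String servers = kafkaProps.getValues("bootstrap.servers", "localhost:21007");
        System.out.println(servers);
    }
}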
"user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + +// props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + +// System.setProperty("java.security.auth.login.config","D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\77042.jaas.conf"); + + return props; + } + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "org.well.well.kafka.util.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + +} diff --git a/src/main/java/org/well/well/kafka/util/LoginUtil.java b/src/main/java/org/well/well/kafka/util/LoginUtil.java new file mode 100644 index 0000000..fc8a8cf --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/LoginUtil.java @@ -0,0 +1,257 @@ +package org.well.well.kafka.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + 
this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. + */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new 
IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/java/org/well/well/kafka/util/SimplePartitioner.java b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java new file mode 100644 index 0000000..bb4ef71 --- /dev/null +++ b/src/main/java/org/well/well/kafka/util/SimplePartitioner.java @@ -0,0 +1,36 @@ +package org.well.well.kafka.util; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/org/well/well/manager/LiquidManager.java b/src/main/java/org/well/well/manager/LiquidManager.java index c421e02..e8cd95c 100644 --- a/src/main/java/org/well/well/manager/LiquidManager.java +++ b/src/main/java/org/well/well/manager/LiquidManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; import org.well.well.dto.LiquidDTO; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -63,6 +71,8 @@ return resultMap; } + + public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; @@ -70,13 +80,15 @@ String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"level"}; + String[] standardParam = new String[]{"Level"}; for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("level").toString(); level = String.format("%.2f", Double.valueOf(level)); String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); - //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 @@ -85,6 +97,13 @@ LiquidDTO liquidDTO = new LiquidDTO(level, cell, upTime, devCode, busWell == null ? 
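SimplePartitioner (registered through partitioner.class in KafkaUtils) derives the partition from a numeric record key. The arithmetic is isolated below with hypothetical keys; since Producer.send always uses an empty key, every message produced by this patch falls into the NumberFormatException branch and lands on partition 0.

public class SimplePartitionerMathSketch {

    // The same decision SimplePartitioner.partition(...) makes, minus the Cluster lookup
    static int partitionFor(String key, int numPartitions) {
        try {
            return Integer.parseInt(key) % numPartitions; // numeric keys are spread by modulo
        } catch (NumberFormatException e) {
            return 0; // non-numeric (or empty) keys all go to partition 0
        }
    }

    public static void main(String[] args) {
        System.out.println(partitionFor("8204", 3)); // 8204 % 3 = 2
        System.out.println(partitionFor("", 3));     // 0 -> the empty key used by Producer.send
    }
}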
"" : busWell.getWellCode(), pci,rsrp,snr); + + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LIQUID, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } this.saveData(liquidDTO);//存采集数据 //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; @@ -107,7 +126,7 @@ String alarmContent = LiquidAlarmEnum.OVER_THRESH.getName(); Integer alarmLevel = 0; -// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); +// List ruleRankList= alarmRuleManager.getRuleRank(devCode,busWell.getDeptid()); for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { @@ -133,6 +152,7 @@ }else if("0".equals(isJob)){//不需要产生工单时 jobID=null; } + alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, level, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); } else {//不存在上一条报警时 @@ -144,6 +164,13 @@ alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Liquid.toString(), "1"); } + + StandardAlarm standardAlarm=new StandardAlarm(devCode, DeviceTypeConstant.LIQUID, StandardDataUtils.alarmDataBuilder(devCode,level,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, alarmContent, level, device, diff --git a/src/main/java/org/well/well/manager/LocatorManager.java b/src/main/java/org/well/well/manager/LocatorManager.java index 940297c..ebb7bd6 100644 --- a/src/main/java/org/well/well/manager/LocatorManager.java +++ b/src/main/java/org/well/well/manager/LocatorManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -11,9 +12,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.domain.BusWell; import org.well.well.domain.Locator; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -27,7 +35,7 @@ Map resultMap = new HashMap(); try { - Locator locator = new Locator(devCode, wellCode,longitude, latitude); + Locator locator = new Locator(devCode, wellCode, longitude, latitude); save(locator); } catch (Exception e) { e.printStackTrace(); @@ -38,11 +46,25 @@ public void processNormalData(JSONObject jsonObject, String devCode) { try { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + String[] realParam = new String[]{"longitude", "latitude"}; + String[] standardParam = new String[]{"Lon", "lat"}; 
if (busWell != null && "1".equals(busWell.getBfzt())) {//只有井处于布防状态下才写入经纬度数据 for (int i = 0; i < jsonArray.size(); i++) { String longitude = ((JSONObject) jsonArray.get(i)).get("longitude").toString(); String latitude = ((JSONObject) jsonArray.get(i)).get("latitude").toString(); + String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); + //如果来自于kafka的数据,不需要处理,kafka数据增加了kafkaDataFlagflag的格式 + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.LOCATION, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } if (StringUtils.isBlank(longitude) || StringUtils.isBlank(latitude)) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; @@ -50,15 +72,14 @@ if (Float.valueOf(longitude) < 1 || Float.valueOf(latitude) < 1) { longitude = null != busWell ? busWell.getCoordinateX() : longitude; latitude = null != busWell ? busWell.getCoordinateY() : latitude; - }else { + } else { // Map stringObjectMap=PointConvertUtility. // convertBaiduAPI(longitude,latitude); // longitude=stringObjectMap!=null?stringObjectMap.get("x").toString():longitude; // latitude=stringObjectMap!=null?stringObjectMap.get("y").toString():latitude; - } } - saveData(devCode,busWell==null?"":busWell.getWellCode(), + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), StringUtils.isNotBlank(longitude) ? DecimalUtils.df6.format(Double.valueOf(longitude)) : longitude, StringUtils.isNotBlank(latitude) ? DecimalUtils.df6.format(Double.valueOf(latitude)) : latitude);//存采集数据 } diff --git a/src/main/java/org/well/well/manager/MeterManager.java b/src/main/java/org/well/well/manager/MeterManager.java index 0fa82ff..31e5c9e 100644 --- a/src/main/java/org/well/well/manager/MeterManager.java +++ b/src/main/java/org/well/well/manager/MeterManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.well.well.base.DeviceTypeEnum; @@ -7,9 +8,16 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,7 +47,8 @@ @Transactional public void processMeterData(Map meterMap) { - + String logtime = meterMap.get("LogTime"); + String kafkaDataFlag = meterMap.get(DeviceTypeConstant.KAFKA_DATA_FLAG); String devCodeBefore = meterMap.get("devCode"); if ("399435X0000002".equals(devCodeBefore)) { meterMap.put("devCode", "2006243001"); @@ -118,6 +127,16 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? 
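Several managers (Locator, TempHumi, WasteGas, Well) now check for a kafkaDataFlag field before forwarding: payloads that were themselves consumed from Kafka carry that marker and are persisted locally but not re-published, which prevents echoing platform data back onto the data topic. A small sketch of the check, with made-up payloads:

import net.sf.json.JSONObject;
import org.well.well.kafka.constant.DeviceTypeConstant;

public class KafkaDataFlagSketch {
    public static void main(String[] args) {
        JSONObject fromDevice = JSONObject.fromObject("{\"cell\":98,\"datas\":[]}");
        JSONObject fromKafka = JSONObject.fromObject("{\"cell\":98,\"datas\":[],\"kafkaDataFlag\":true}");
        // false -> the reading is forwarded to the data topic; true -> forwarding is skipped
        System.out.println(fromDevice.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG));
        System.out.println(fromKafka.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG));
    }
}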
busWell.getWellCode() : "", DeviceTypeEnum.Meter.toString(), "1"); + if(kafkaDataFlag.equals("0")){ + StandardAlarm standardAlarm = new StandardAlarm(devCode, + DeviceTypeConstant.WATER_QUALITY, + StandardDataUtils.alarmDataBuilder(devCode, alarmValue, alarmContent), logtime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } + //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", null, "水质超限告警", @@ -130,7 +149,6 @@ // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); ThreadUtil.excuteMsg(alarmJob.getId(), "水质超限告警", busWell != null ? busWell.getWellCode() : ""); - } } else {//未超限 //1.清除之前的报警 diff --git a/src/main/java/org/well/well/manager/MethaneManager.java b/src/main/java/org/well/well/manager/MethaneManager.java index 3b50bcb..8b4a408 100644 --- a/src/main/java/org/well/well/manager/MethaneManager.java +++ b/src/main/java/org/well/well/manager/MethaneManager.java @@ -1,5 +1,6 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -9,9 +10,17 @@ import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -48,7 +57,7 @@ try { Methane methane = new Methane(address, wellCode, strength, cell, new Timestamp((DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)).getTime())), - new Timestamp((new Date()).getTime()),pci,rsrp,snr); + new Timestamp((new Date()).getTime()), pci, rsrp, snr); save(methane); } catch (Exception e) { e.printStackTrace(); @@ -64,6 +73,9 @@ String snr = null != jsonObject.get("snr") ? jsonObject.get("snr").toString() : ""; JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"gas"}; + String[] standardParam = new String[]{"Gas"}; for (int i = 0; i < jsonArray.size(); i++) { try { String gas = ((JSONObject) jsonArray.get(i)).get("gas").toString(); @@ -73,6 +85,12 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.METHANE, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } //3.存数据 saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), gas, cell, upTime, pci, rsrp, snr);//存储采集数据 //4.有无超限处理流程(判断最后一条数据) @@ -102,6 +120,11 @@ //1.生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell != null ? busWell.getWellCode() : "", DeviceTypeEnum.Methane.toString(), "1"); + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.METHANE, StandardDataUtils.alarmDataBuilder(devCode, gas, alarmContent), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } //2.写入新的告警 alarmRecordManager.saveData(alarmJob.getId(), "1", alarmLevel, alarmContent, gas, device, diff --git a/src/main/java/org/well/well/manager/NoiseDigManager.java b/src/main/java/org/well/well/manager/NoiseDigManager.java index 6cc0619..e0bbcf8 100644 --- a/src/main/java/org/well/well/manager/NoiseDigManager.java +++ b/src/main/java/org/well/well/manager/NoiseDigManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -13,6 +14,13 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; @@ -41,13 +49,13 @@ public Map saveData(String devId, String wellCode, String cell, String uptime, String frequency, String dData, - String pci,String rsrp,String snr) throws IOException { + String pci, String rsrp, String snr) throws IOException { Map resultMap = new HashMap(); try { NoiseDig noise = new NoiseDig(devId, wellCode, cell, DateUtils.sdf4.parse(DateUtils.DateFormat(uptime)), - frequency, dData,pci,rsrp,snr); + frequency, dData, pci, rsrp, snr); save(noise); } catch (Exception e) { e.printStackTrace(); @@ -57,11 +65,15 @@ public void processNormalData(JSONObject jsonObject, String devCode) { String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; - JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; BusWell busWell = busWellManager.getWellByDevCode(devCode); + //数据对接所用到参数 + JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + List> appendList = StandardDataUtils.appendListBuilder(cell,"Power"); + String[] realParam = new String[]{"noiseVal", "noiseFreq"}; + String[] standardParam = new String[]{"NoiseVal", "NoiseFreq"}; for (int i = 0; i < jsonArray.size(); i++) { try { String val = ((JSONObject) jsonArray.get(i)).get("noiseVal").toString(); @@ -69,12 +81,20 @@ String upTime = ((JSONObject) jsonArray.get(i)).get("uptime").toString(); val = String.format("%.2f", Double.valueOf(val)); freq = String.format("%.2f", Double.valueOf(freq)); + //1.清离线 deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.NOISE_DIG, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if(!StringUtils.isEmpty(dataTopic)){ + Producer.send(JSON.toJSONString(standardData),dataTopic); + } //3.存数据 - saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val,pci,rsrp,snr); //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 + saveData(devCode, busWell == null ? "" : busWell.getWellCode(), cell, upTime, freq, val, pci, rsrp, snr); + //4.有无超限处理流程(判断最后一条数据) saveData(devCode, busWell == null ? "" : busWell.getWellCode(), gas, cell, upTime);//存储采集数据 if (i < jsonArray.size() - 1) continue; String thresh = alarmRuleManager.getRuleStr(devCode, DeviceTypeEnum.NoiseDig.toString());//获取报警阈值 List ruleRankList = new ArrayList(); @@ -100,8 +120,7 @@ for (int j = 0; j < ruleRankList.size(); j++) { Object rankValue = ((Object[]) ruleRankList.get(j))[2]; if (null != rankValue) { - - if (Float.valueOf(val) >= ( new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue())/100) { + if (Float.valueOf(val) >= (new BigDecimal(rankValue.toString()).multiply(new BigDecimal(thresh)).floatValue()) / 100) { // NoiseDigAlarmEnum noiseDigAlarmEnum=NoiseDigAlarmEnum.getByIndex( // String.valueOf(Long.valueOf(((Object[]) ruleRankList.get(j))[0].toString()) + 4)); // alarmContent = noiseDigAlarmEnum!=null?noiseDigAlarmEnum.getName():alarmContent; @@ -122,10 +141,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.NoiseDig.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell == null ? "" : busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell == null ? "" : busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, alarmContent, val, alarmRecord.getDevice(), devCode, alarmRecord.getWellCode(), upTime, alarmContent); @@ -147,8 +166,12 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - - ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent,busWell != null ? 
busWell.getWellCode() : ""); + StandardAlarm standardAlarm=new StandardAlarm(devCode,DeviceTypeConstant.NOISE_DIG,StandardDataUtils.alarmDataBuilder(devCode,val,alarmContent),upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + ThreadUtil.excuteMsg(alarmJob == null ? null : alarmJob.getId(), alarmContent, busWell != null ? busWell.getWellCode() : ""); } } else {//未超限 diff --git a/src/main/java/org/well/well/manager/TempHumiManager.java b/src/main/java/org/well/well/manager/TempHumiManager.java index d84dcc7..a011ba9 100644 --- a/src/main/java/org/well/well/manager/TempHumiManager.java +++ b/src/main/java/org/well/well/manager/TempHumiManager.java @@ -1,6 +1,7 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; @@ -14,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -42,15 +50,15 @@ private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private static String[] nameArr = {"温度", "湿度"}; - public Map saveData(String devCode, String wellCode,String upTime, + public Map saveData(String devCode, String wellCode, String upTime, String temperature, String humidity, String cell, - String pci,String rsrp,String snr) { + String pci, String rsrp, String snr) { Map resultMap = new HashMap(); try { - TempHumi tempHumi = new TempHumi(devCode, wellCode, + TempHumi tempHumi = new TempHumi(devCode, wellCode, DateUtils.sdf4.parse(DateUtils.DateFormat(upTime)), - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); save(tempHumi); } catch (Exception e) { e.printStackTrace(); @@ -60,10 +68,14 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); String cell = jsonObject.get("cell").toString(); - String pci = null!=jsonObject.get("pci")?jsonObject.get("pci").toString():""; - String rsrp = null!=jsonObject.get("rsrp")?jsonObject.get("rsrp").toString():""; - String snr = null!=jsonObject.get("snr")?jsonObject.get("snr").toString():""; + String pci = null != jsonObject.get("pci") ? jsonObject.get("pci").toString() : ""; + String rsrp = null != jsonObject.get("rsrp") ? jsonObject.get("rsrp").toString() : ""; + String snr = null != jsonObject.get("snr") ? 
jsonObject.get("snr").toString() : ""; + List> appendList = StandardDataUtils.appendListBuilder(cell, "Power"); + String[] realParam = new String[]{"temperature", "humidity"}; + String[] standardParam = new String[]{"Temp", "Humi"}; BusWell busWell = busWellManager.getWellByDevCode(devCode); for (int i = 0; i < jsonArray.size(); i++) { try { @@ -77,13 +89,23 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.TEMP_HUMI, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, - temperature, humidity, cell,pci,rsrp,snr); + temperature, humidity, cell, pci, rsrp, snr); //4.有无超限处理流程(判断最后一条数据) if (i < jsonArray.size() - 1) continue; if (busWell != null) {//获取不到相关联的井,就无法获取告警规则,故不存库 - String alarmContent = "", alarmValue = "",isAlarm="1",isJob="1"; + String alarmContent = "", alarmValue = "", isAlarm = "1", isJob = "1"; Integer alarmLevel = 10; List ruleRankList = alarmRuleManager.getRuleRank(devCode, busWell.getDeptid()); if (ruleRankList.size() > 0) { @@ -134,7 +156,7 @@ } } - if (StringUtils.isNotBlank(alarmContent)&&"1".equals(isAlarm)) { + if (StringUtils.isNotBlank(alarmContent) && "1".equals(isAlarm)) { AlarmRecord alarmRecord = alarmRecordManager.getThresholdAlarm(devCode); if (null != alarmRecord) {//已存在报警 //1.清除上一条告警记录 @@ -145,10 +167,10 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, alarmRecord.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); - jobID=alarmJob.getId(); - ThreadUtil.excuteMsg(jobID, "",busWell.getWellCode()); - }else if("0".equals(isJob)){//不需要产生工单时 - jobID=null; + jobID = alarmJob.getId(); + ThreadUtil.excuteMsg(jobID, "", busWell.getWellCode()); + } else if ("0".equals(isJob)) {//不需要产生工单时 + jobID = null; } alarmRecordManager.saveData(jobID, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), @@ -158,15 +180,23 @@ } else {//不存在上一条报警时 Device device = deviceManager.getDeviceByDevCode(devCode); if (device == null) return;//未注册设备舍弃 - AlarmJob alarmJob =null; - if("1".equals(isJob)){ + AlarmJob alarmJob = null; + if ("1".equals(isJob)) { //1.若开启工单,生成新的工单 - alarmJob = alarmJobManager.saveData(devCode, - busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(),"1"); + alarmJob = alarmJobManager.saveData(devCode, + busWell.getWellCode(), DeviceTypeEnum.TempHumi.toString(), "1"); } - + String val = "湿度:" + humidity + "%RH," + "温度:" + temperature + "℃,"; + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.TEMP_HUMI, StandardDataUtils.alarmDataBuilder(devCode, val, + TempHumiAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if(!StringUtils.isEmpty(alarmTopic)){ + Producer.send(JSON.toJSONString(standardAlarm),alarmTopic); + } + } //2.写入新的告警 - alarmRecordManager.saveData(alarmJob!=null?alarmJob.getId():null, "1", alarmLevel, + alarmRecordManager.saveData(alarmJob != null ? 
alarmJob.getId() : null, "1", alarmLevel, TempHumiAlarmEnum.OVER_THRESH.getName(), alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell.getWellCode(), upTime, @@ -175,7 +205,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "液位超限", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob!=null?alarmJob.getId():null, TempHumiAlarmEnum.OVER_THRESH.getName(), + ThreadUtil.excuteMsg(alarmJob != null ? alarmJob.getId() : null, TempHumiAlarmEnum.OVER_THRESH.getName(), busWell.getWellCode()); } } else {//未超限 @@ -207,10 +237,10 @@ BusWell busWell = busWellManager.getWellByDevCode(devCode); //3.若无工单写入新的工单 - AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode,"0"); + AlarmJob alarmJob = alarmJobManager.getByDevAndType(devCode, "0"); if (null == alarmJob) { alarmJob = alarmJobManager.saveData(devCode, busWell != null ? - busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(),"0"); + busWell.getWellCode() : "", DeviceTypeEnum.TempHumi.toString(), "0"); } //4.写入新的报警 alarmRecordManager.saveData(alarmJob.getId(), "2", null, diff --git a/src/main/java/org/well/well/manager/WasteGasManager.java b/src/main/java/org/well/well/manager/WasteGasManager.java index 64c9be6..0db8958 100644 --- a/src/main/java/org/well/well/manager/WasteGasManager.java +++ b/src/main/java/org/well/well/manager/WasteGasManager.java @@ -1,11 +1,13 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WasteGasAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; @@ -13,7 +15,14 @@ import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; -import org.well.well.util.PushList; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; +//import org.well.well.util.PushList; import javax.annotation.Resource; import java.io.IOException; @@ -159,7 +168,11 @@ public void processNormalData(JSONObject jsonObject, String devCode) { JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("","Power"); + String[] realParam = new String[]{"CO","O2","H2S","CH4","liquidSwitch"}; + String[] standardParam = new String[]{"CO","O2","H2S","CH4","LiquidSwitch"}; for (int i = 0; i < jsonArray.size(); i++) { try { String CO = ((JSONObject) jsonArray.get(i)).get("CO").toString(); @@ -176,6 +189,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate((JSONObject) jsonArray.get(i), realParam, standardParam, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WASTE_GAS, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if 
(!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } + //3.存数据 saveData(devCode, busWell == null ? "" : busWell.getWellCode(), upTime, CO, O2, H2S, CH4, isOpen);//存采集数据 @@ -225,6 +248,15 @@ alarmValue.substring(0, alarmValue.length() - 1), device, devCode, busWell != null ? busWell.getWellCode() : "", upTime, alarmContent.substring(0, alarmContent.length() - 1)); + + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WASTE_GAS, StandardDataUtils.alarmDataBuilder(devCode, alarmValue, + WasteGasAlarmEnum.OVER_THRESH.getName()), upTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } //3.toDo:向app推送报警消息 // List cids = userManager.getClients(busWell); // if (cids.size() > 0) diff --git a/src/main/java/org/well/well/manager/WellManager.java b/src/main/java/org/well/well/manager/WellManager.java index 23b58fb..8e6b535 100644 --- a/src/main/java/org/well/well/manager/WellManager.java +++ b/src/main/java/org/well/well/manager/WellManager.java @@ -1,20 +1,31 @@ package org.well.well.manager; +import com.alibaba.fastjson.JSON; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import org.well.well.AlarmEnumDTO.TempHumiAlarmEnum; import org.well.well.AlarmEnumDTO.WellAlarmEnum; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.hibernate.HibernateEntityDao; +import org.well.well.core.util.StringUtils; import org.well.well.core.util.ThreadUtil; import org.well.well.domain.*; +import org.well.well.kafka.Producer; +import org.well.well.kafka.StandardAlarm; +import org.well.well.kafka.StandardData; +import org.well.well.kafka.StandardDataUtils; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; +import org.well.well.util.Configure; import javax.annotation.Resource; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -46,8 +57,12 @@ } public void processNormalData(JSONObject jsonObject, String devCode) { + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); JSONArray jsonArray = (JSONArray) jsonObject.get("datas"); BusWell busWell = busWellManager.getWellByDevCode(devCode); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.NORMAL.getName(), "Status")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); for (int i = 0; i < jsonArray.size(); i++) { try { String level = ((JSONObject) jsonArray.get(i)).get("value").toString();//"00" @@ -56,6 +71,16 @@ deviceManager.clearOffline(devCode); //2.清除设备告警 alarmRecordManager.clearAlarmByNormalData(devCode, "2"); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, upTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + logger.debug("producer start."); + Producer.send(JSON.toJSONString(standardData), dataTopic); + logger.debug("producer end."); + } + } //3.存数据 this.saveData(devCode, busWell == null ? 
"" : busWell.getWellCode(), WellAlarmEnum.NORMAL.toString(), @@ -71,16 +96,27 @@ JSONArray jsonArray = (JSONArray) jsonObject.get("eventType"); String logTime = (jsonObject.get("logTime")).toString(); BusWell busWell = busWellManager.getWellByDevCode(devCode); + boolean kafkaDataFlag = jsonObject.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG); + List> appendList = StandardDataUtils.appendListBuilder("", "Power"); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Angle")); + appendList.addAll(StandardDataUtils.appendListBuilder(WellAlarmEnum.WellOpenAlarm.getName(), "Status")); for (int i = 0; i < jsonArray.size(); i++) { try { //1.清离线 deviceManager.clearOffline(devCode); + if (!kafkaDataFlag) { + String standardStatus = StandardDataUtils.standardStatusFomate(null, null, null, appendList); + StandardData standardData = new StandardData(devCode, DeviceTypeConstant.WELL, standardStatus, logTime); + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(JSON.toJSONString(standardData), dataTopic); + } + } //2.存入数据表 this.saveData(devCode, busWell == null ? "" : busWell.getWellCode(), WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getName(), String.valueOf(WellAlarmEnum.valueOf(jsonArray.get(i).toString()).getIndex())); //3.根据不同的报警,进入不同的处理方法(主要分开盖和其他类型的报警) - //3.1井盖开盖报警处理 if (WellAlarmEnum.WellOpenAlarm.name().equals(jsonArray.get(i).toString())) { alarmRecordManager.clearAlarm(devCode, "2", "");//清除设备本身报警 @@ -102,6 +138,14 @@ //生成新的工单 AlarmJob alarmJob = alarmJobManager.saveData(devCode, busWell.getWellCode(), DeviceTypeEnum.Well.toString(), "1"); + if (!kafkaDataFlag) { + StandardAlarm standardAlarm = new StandardAlarm(devCode, DeviceTypeConstant.WELL, StandardDataUtils.alarmDataBuilder(devCode, WellAlarmEnum.WellOpenAlarm.getName(), + TempHumiAlarmEnum.OVER_THRESH.getName()), logTime); + String alarmTopic = Configure.getProperty(TopicConstant.ALARM_TOPIC, ""); + if (!StringUtils.isEmpty(alarmTopic)) { + Producer.send(JSON.toJSONString(standardAlarm), alarmTopic); + } + } alarmRecordManager.saveData(alarmJob.getId(), "1", null, WellAlarmEnum.WellOpenAlarm.getName(), @@ -112,7 +156,7 @@ // List cids = userManager.getClients(busWell); // if (cids.size() > 0) // PushList.pushToUser(cids, "井盖开盖报警", "报警啦,快来处理"); - ThreadUtil.excuteMsg(alarmJob.getId(),WellAlarmEnum.WellOpenAlarm.getName(), + ThreadUtil.excuteMsg(alarmJob.getId(), WellAlarmEnum.WellOpenAlarm.getName(), busWell == null ? 
"" : busWell.getWellCode()); } } diff --git a/src/main/java/org/well/well/resp/WaterQualityResponse.java b/src/main/java/org/well/well/resp/WaterQualityResponse.java index 47d3674..87b18ea 100644 --- a/src/main/java/org/well/well/resp/WaterQualityResponse.java +++ b/src/main/java/org/well/well/resp/WaterQualityResponse.java @@ -9,10 +9,15 @@ import org.well.well.base.AbstractResponse; import org.well.well.base.DeviceTypeEnum; import org.well.well.core.util.DateUtils; +import org.well.well.core.util.StringUtils; +import org.well.well.kafka.Producer; +import org.well.well.kafka.constant.DeviceTypeConstant; +import org.well.well.kafka.constant.TopicConstant; import org.well.well.manager.DeviceConfigManager; import org.well.well.manager.ImeiManager; import org.well.well.manager.LiquidManager; import org.well.well.manager.MeterManager; +import org.well.well.util.Configure; import java.util.HashMap; import java.util.Map; @@ -29,24 +34,38 @@ try { ClassPathXmlApplicationContext ac = this.getAc(); MeterManager meterManager = ac.getBean(MeterManager.class); + //直接推送 JSONObject json = JSONObject.fromObject(content); String devCode = json.get("DevID").toString(); + String dataTimeStr = json.get("LogTime").toString(); Map meterMap = new HashMap(); Map meterMapBefore = new HashMap(); - meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]","")); + meterMap.put("DataTime", dataTimeStr.replaceAll("[[\\s-:punct:]]", "")); meterMap.put("CN", "2011"); + meterMap.put("LogTime", dataTimeStr); meterMap.put("devCode", devCode); -// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); +// JSONArray jsonArray = JSON.parseArray(json.get("Status").toString()); JSONArray jsonArray = JSONArray.fromObject(json.get("Status")); for (int i = 0; i < jsonArray.size(); i++) { String name = ((JSONObject) jsonArray.get(i)).get("Key").toString(); String value = ((JSONObject) jsonArray.get(i)).get("Value").toString(); - meterMapBefore.put(name,value); + meterMapBefore.put(name, value); } + //包含ProviderData,且值为1,说明来自于外协厂家,直接进行大数据平台的转发 + if (json.containsKey("ProviderData")&&json.get("ProviderData").toString().equals("1")) { + String dataTopic = Configure.getProperty(TopicConstant.DATA_TOPIC, ""); + if (!StringUtils.isEmpty(dataTopic)) { + Producer.send(content, dataTopic); + } + } + //水质的数据分别从大数据平台和来源第三方外协 + //由于大数据平台直接转发,没有kafkaDataFlag设置,在第三方转发的时候添加的false, + // 所以这里的逻辑和格式 和其他设备不一样 + meterMap.put(DeviceTypeConstant.KAFKA_DATA_FLAG, json.containsKey(DeviceTypeConstant.KAFKA_DATA_FLAG) ? 
"0" : "1"); meterMap.put("temp", meterMapBefore.get("Temp")); meterMap.put("PH", meterMapBefore.get("PH")); - meterMap.put("conductivity", meterMapBefore.get("Cond")); + meterMap.put("conductivity", meterMapBefore.get("Cond")); meterMap.put("DO", meterMapBefore.get("DO")); meterMap.put("turbidity", meterMapBefore.get("Turb")); meterMap.put("COD", meterMapBefore.get("COD")); @@ -54,7 +73,7 @@ meterMap.put("TP", meterMapBefore.get("TP")); meterMap.put("TN", meterMapBefore.get("TN")); meterManager.processMeterData(meterMap); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); } } diff --git a/src/main/java/org/well/well/util/PushList.java b/src/main/java/org/well/well/util/PushList.java index 74568fe..ba5e1af 100644 --- a/src/main/java/org/well/well/util/PushList.java +++ b/src/main/java/org/well/well/util/PushList.java @@ -1,85 +1,85 @@ -package org.well.well.util; - -import java.util.ArrayList; -import java.util.List; - -import com.gexin.rp.sdk.base.IPushResult; -import com.gexin.rp.sdk.base.impl.ListMessage; -import com.gexin.rp.sdk.base.impl.Target; -import com.gexin.rp.sdk.http.IGtPush; -import com.gexin.rp.sdk.template.NotificationTemplate; -import com.gexin.rp.sdk.template.style.Style0; - -/** - * Created by test203 on 2019/6/11. - */ - - -public class PushList { - //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换; - - private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4"); - private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5"); - private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6"); - - //别名推送方式 - // static String Alias1 = ""; - // static String Alias2 = ""; - static String host = "http://sdk.open.api.igexin.com/apiex.htm"; - - public static void pushToUser(List cids, String title, String content){ - // 配置返回每个用户返回用户状态,可选 - System.setProperty("gexin_pushList_needDetails", "true"); - // 配置返回每个别名及其对应cid的用户状态,可选 - // System.setProperty("gexin_pushList_needAliasDetails", "true"); - IGtPush push = new IGtPush(host, appKey, masterSecret); - // 通知透传模板 - NotificationTemplate template = notificationTemplateDemo(title,content); - ListMessage message = new ListMessage(); - message.setData(template); - // 设置消息离线,并设置离线时间 - message.setOffline(true); - // 离线有效时间,单位为毫秒,可选 - message.setOfflineExpireTime(24 * 1000 * 3600); - // 配置推送目标 - List targets = new ArrayList(); - for (String cid : cids) { - Target target = new Target(); - target.setAppId(appId); - target.setClientId(cid); - targets.add(target); - } - - // taskId用于在推送时去查找对应的message - String taskId = push.getContentId(message); - IPushResult ret = push.pushMessageToList(taskId, targets); - System.out.println(ret.getResponse().toString()); - } - - public static NotificationTemplate notificationTemplateDemo(String title,String content) { - NotificationTemplate template = new NotificationTemplate(); - // 设置APPID与APPKEY - template.setAppId(appId); - template.setAppkey(appKey); - - Style0 style = new Style0(); - // 设置通知栏标题与内容 - style.setTitle(title); - style.setText(content); - // 配置通知栏图标 - style.setLogo("icon.png"); - // 配置通知栏网络图标 - style.setLogoUrl(""); - // 设置通知是否响铃,震动,或者可清除 - style.setRing(true); - style.setVibrate(true); - style.setClearable(true); - template.setStyle(style); - - // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动 - template.setTransmissionType(2); - template.setTransmissionContent("请输入您要透传的内容"); - return template; - } -} - +//package org.well.well.util; +// +//import java.util.ArrayList; 
+//import java.util.List;
+//
+//import com.gexin.rp.sdk.base.IPushResult;
+//import com.gexin.rp.sdk.base.impl.ListMessage;
+//import com.gexin.rp.sdk.base.impl.Target;
+//import com.gexin.rp.sdk.http.IGtPush;
+//import com.gexin.rp.sdk.template.NotificationTemplate;
+//import com.gexin.rp.sdk.template.style.Style0;
+//
+///**
+// * Created by test203 on 2019/6/11.
+// */
+//
+//
+//public class PushList {
+//    //采用"Java SDK 快速入门", "第二步 获取访问凭证 "中获得的应用配置,用户可以自行替换;
+//
+//    private static String appId = Configure.getProperty("gt.appId","qQqFtTBrUa7SGLjntUlpT4");
+//    private static String appKey = Configure.getProperty("gt.appKey","GNjHLiqJkD6OoeZbtbdfi5");
+//    private static String masterSecret = Configure.getProperty("gt.masterSecret","TZRYQknkxLAJ58uH56PeS6");
+//
+//    //别名推送方式
+//    // static String Alias1 = "";
+//    // static String Alias2 = "";
+//    static String host = "http://sdk.open.api.igexin.com/apiex.htm";
+//
+//    public static void pushToUser(List cids, String title, String content){
+//        // 配置返回每个用户返回用户状态,可选
+//        System.setProperty("gexin_pushList_needDetails", "true");
+//        // 配置返回每个别名及其对应cid的用户状态,可选
+//        // System.setProperty("gexin_pushList_needAliasDetails", "true");
+//        IGtPush push = new IGtPush(host, appKey, masterSecret);
+//        // 通知透传模板
+//        NotificationTemplate template = notificationTemplateDemo(title,content);
+//        ListMessage message = new ListMessage();
+//        message.setData(template);
+//        // 设置消息离线,并设置离线时间
+//        message.setOffline(true);
+//        // 离线有效时间,单位为毫秒,可选
+//        message.setOfflineExpireTime(24 * 1000 * 3600);
+//        // 配置推送目标
+//        List targets = new ArrayList();
+//        for (String cid : cids) {
+//            Target target = new Target();
+//            target.setAppId(appId);
+//            target.setClientId(cid);
+//            targets.add(target);
+//        }
+//
+//        // taskId用于在推送时去查找对应的message
+//        String taskId = push.getContentId(message);
+//        IPushResult ret = push.pushMessageToList(taskId, targets);
+//        System.out.println(ret.getResponse().toString());
+//    }
+//
+//    public static NotificationTemplate notificationTemplateDemo(String title,String content) {
+//        NotificationTemplate template = new NotificationTemplate();
+//        // 设置APPID与APPKEY
+//        template.setAppId(appId);
+//        template.setAppkey(appKey);
+//
+//        Style0 style = new Style0();
+//        // 设置通知栏标题与内容
+//        style.setTitle(title);
+//        style.setText(content);
+//        // 配置通知栏图标
+//        style.setLogo("icon.png");
+//        // 配置通知栏网络图标
+//        style.setLogoUrl("");
+//        // 设置通知是否响铃,震动,或者可清除
+//        style.setRing(true);
+//        style.setVibrate(true);
+//        style.setClearable(true);
+//        template.setStyle(style);
+//
+//        // 透传消息设置,1为强制启动应用,客户端接收到消息后就会立即启动应用;2为等待应用启动
+//        template.setTransmissionType(2);
+//        template.setTransmissionContent("请输入您要透传的内容");
+//        return template;
+//    }
+//}
+//
diff --git a/src/main/java/org/well/well/util/ResponseResolver.java b/src/main/java/org/well/well/util/ResponseResolver.java
index 6fef231..f73734e 100644
--- a/src/main/java/org/well/well/util/ResponseResolver.java
+++ b/src/main/java/org/well/well/util/ResponseResolver.java
@@ -20,14 +20,15 @@
     static {
         abstractResponseHashMap.put(DeviceTypeEnum.Liquid.name(), new LiquidResponse());
         abstractResponseHashMap.put(DeviceTypeEnum.Well.name(), new WellResponse());
-        abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse());
+        abstractResponseHashMap.put(DeviceTypeEnum.Concentrator.name(), new ConcentratorResponse());// concentrator
         abstractResponseHashMap.put(DeviceTypeEnum.Methane.name(), new MethaneResponse());
+        abstractResponseHashMap.put(DeviceTypeEnum.WasteGas.name(), new WasteGasResponse());
-        abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse());
-//        abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse());
-        abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse());
-        abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse());
-        abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse());
+        abstractResponseHashMap.put(DeviceTypeEnum.TempHumi.name(), new TempHumiResponse());// temperature & humidity
+//        abstractResponseHashMap.put(DeviceTypeEnum.Dig.name(), new NoiseDigResponse());
+        abstractResponseHashMap.put(DeviceTypeEnum.NoiseDig.name(), new NoiseDigResponse());// noise & digging
+        abstractResponseHashMap.put(DeviceTypeEnum.Locator.name(), new LocatorResponse());// manhole-cover locator
+        abstractResponseHashMap.put(DeviceTypeEnum.WaterQuality.name(), new WaterQualityResponse());// water-quality monitoring
     }
diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF
new file mode 100644
index 0000000..4701fab
--- /dev/null
+++ b/src/main/resources/META-INF/MANIFEST.MF
@@ -0,0 +1,41 @@
+Manifest-Version: 1.0
+Main-Class: org.well.mysql.sink.WellSink
+Class-Path: slf4j-log4j12-1.6.1.jar commons-codec-1.9.jar commons-pool2-
+ 2.4.2.jar javassist-3.18.1-GA.jar jcl-over-slf4j-1.7.7.jar jansi-1.11.j
+ ar hawtbuf-1.11.jar jackson-databind-2.4.2.jar mina-core-2.0.4.jar juni
+ t-4.10.jar log4j-1.2.16.jar dom4j-1.6.1.jar hamcrest-core-1.1.jar orika
+ -core-1.4.5.jar activemq-pool-5.14.5.jar activemq-client-5.14.5.jar spr
+ ing-beans-4.0.6.RELEASE.jar httpcore-4.2.1.jar jboss-transaction-api_1.
+ 2_spec-1.0.0.Final.jar commons-io-2.4.jar concurrentlinkedhashmap-lru-1
+ .2_jdk5.jar commons-cli-1.2.jar commons-fileupload-1.3.1.jar jsr305-1.3
+ .9.jar gson-2.2.2.jar jackson-jaxrs-json-provider-2.4.2.jar jetty-6.1.2
+ 6.jar janino-2.7.6.jar commons-beanutils-1.9.2.jar hibernate-core-4.3.6
+ .Final.jar guava-10.0.1.jar spring-context-support-4.0.6.RELEASE.jar lo
+ g4jdbc-remix-0.2.7.jar hibernate-jpa-2.1-api-1.0.0.Final.jar servlet-ap
+ i-2.5-20110124.jar geronimo-jms_1.1_spec-1.1.1.jar java-sizeof-0.0.4.ja
+ r aopalliance-1.0.jar jandex-1.1.0.Final.jar jboss-logging-3.1.3.GA.jar
+ antlr-2.7.7.jar validation-api-1.1.0.Final.jar geronimo-jta_1.0.1B_spe
+ c-1.0.1.jar jackson-jaxrs-base-2.4.2.jar logback-core-1.1.2.jar hiberna
+ te-validator-5.1.2.Final.jar logback-classic-1.1.2.jar avro-1.7.3.jar s
+ pring-tx-4.0.6.RELEASE.jar spring-orm-4.0.6.RELEASE.jar activemq-broker
+ -5.14.5.jar kafka-clients-2.4.0-hw-ei-312005.jar commons-pool-1.5.4.jar
+ ezmorph-1.0.6.jar netty-3.4.0.Final.jar avro-ipc-1.7.3.jar flume-ng-co
+ re-1.4.0.jar activemq-openwire-legacy-5.14.5.jar commons-compiler-2.7.6
+ .jar jetty-util-6.1.26.jar xbean-spring-4.2.jar jackson-core-asl-1.9.3.
+ jar jackson-mapper-asl-1.9.3.jar spring-core-4.0.6.RELEASE.jar geronimo
+ -j2ee-management_1.1_spec-1.0.1.jar ojdbc6-11.1.0.7.0.jar json-lib-2.4-
+ jdk15.jar activemq-jms-pool-5.14.5.jar jul-to-slf4j-1.7.7.jar velocity-
+ 1.7.jar joda-time-2.1.jar commons-httpclient-3.1.jar spring-context-4.0
+ .6.RELEASE.jar commons-lang3-3.3.2.jar commons-lang-2.5.jar libthrift-0
+ .7.0.jar jackson-core-2.4.2.jar hibernate-commons-annotations-4.0.5.Fin
+ al.jar commons-logging-1.1.1.jar fastjson-1.1.15.jar mysql-connector-ja
+ va-5.1.25.jar spring-aop-4.0.6.RELEASE.jar slf4j-api-1.7.7.jar httpclie
+ nt-4.2.1.jar flume-ng-configuration-1.4.0.jar jboss-logging-annotations
+ -1.2.0.Beta1.jar snappy-java-1.0.4.1.jar paranamer-2.3.jar flume-ng-sdk
+ -1.4.0.jar spring-webmvc-4.0.6.RELEASE.jar jackson-annotations-2.4.0.ja
+ r jackson-module-jaxb-annotations-2.4.2.jar commons-dbcp-1.4.jar spring
+ -jms-3.2.8.RELEASE.jar spring-expression-4.0.6.RELEASE.jar jstl-1.2.jar
+ xml-apis-1.0.b2.jar activemq-spring-5.14.5.jar spring-jdbc-4.0.6.RELEA
+ SE.jar classmate-1.0.0.jar commons-collections-3.2.1.jar spring-web-4.0
+ .6.RELEASE.jar
+
diff --git a/src/main/resources/wellSensor/77042.jaas.conf b/src/main/resources/wellSensor/77042.jaas.conf
new file mode 100644
index 0000000..3abd31a
--- /dev/null
+++ b/src/main/resources/wellSensor/77042.jaas.conf
@@ -0,0 +1,27 @@
+StormClient {
+com.sun.security.auth.module.Krb5LoginModule required
+useKeyTab=true
+keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab"
+principal="kafkauser"
+useTicketCache=false
+storeKey=true
+debug=true;
+};
+KafkaClient {
+com.sun.security.auth.module.Krb5LoginModule required
+useKeyTab=true
+keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab"
+principal="kafkauser"
+useTicketCache=false
+storeKey=true
+debug=true;
+};
+Client {
+com.sun.security.auth.module.Krb5LoginModule required
+useKeyTab=true
+keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab"
+principal="kafkauser"
+useTicketCache=false
+storeKey=true
+debug=true;
+};
diff --git a/src/main/resources/wellSensor/application.properties b/src/main/resources/wellSensor/application.properties
index 0c2d25c..a8970d1 100644
--- a/src/main/resources/wellSensor/application.properties
+++ b/src/main/resources/wellSensor/application.properties
@@ -20,9 +20,9 @@
 #db.default.username=sensor
 #db.default.password=sensor
 db.default.driverClassName=com.mysql.jdbc.Driver
-db.default.url=jdbc:mysql://192.168.0.166:3306/smartwell?useUnicode=true&characterEncoding=UTF-8&useSSL=false
+db.default.url=jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?useUnicode=true&characterEncoding=UTF-8&useSSL=false
 db.default.username=root
-db.default.password=root
+db.default.password=casic203yz2db
 ### ============================================================================
 # dbReal
 # ============================================================================
@@ -157,8 +157,8 @@
 # ============================================================================
 # ACTIVEMQ配置
 # ============================================================================
-activemq_url = tcp://192.168.0.203:61616
-activemq_username =
+activemq_url = tcp://127.0.0.1:61616
+activemq_username =""
 activemq_password =
@@ -166,6 +166,13 @@
 # 告警、工单推送地址
 # ============================================================================
 #sendURL =http://111.198.10.15:11302/smartwell/job/updateSinkJob
-sendURL =http://localhost:14537/job/updateSinkJob
+sendURL =http://192.168.0.218:80/smartwell/job/updateSinkJob
+
+# ============================================================================
+# Kafka big-data platform
+
+bootstrapServer=192.168.65.14:21005,192.168.65.15:21005,192.168.65.16:21005
+alarmTopic=MSGQUEUE_8287
+dataTopic=TEMPSTORE_8204
diff --git a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml
index a11375d..4b1a083 100644
--- a/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml
+++ b/src/main/resources/wellSensor/applicationContex-ActiveMQ.xml
@@ -16,8 +16,7 @@
+ userName="${activemq_username}"/>
diff --git a/src/main/resources/wellSensor/consumer.properties b/src/main/resources/wellSensor/consumer.properties
new file mode 100644
index 0000000..1451c84
--- /dev/null
+++ b/src/main/resources/wellSensor/consumer.properties
@@ -0,0 +1,5 @@
+security.protocol = SASL_PLAINTEXT
+kerberos.domain.name = hadoop.hadoop.com
+group.id = example-group1
+auto.commit.interval.ms = 60000
+sasl.kerberos.service.name = kafka
diff --git a/src/main/resources/wellSensor/kafkaSecurityMode b/src/main/resources/wellSensor/kafkaSecurityMode
new file mode 100644
index 0000000..ed59a5e
--- /dev/null
+++ b/src/main/resources/wellSensor/kafkaSecurityMode
@@ -0,0 +1 @@
+kafka.client.security.mode = yes
diff --git a/src/main/resources/wellSensor/krb5.conf b/src/main/resources/wellSensor/krb5.conf
new file mode 100644
index 0000000..003c6c7
--- /dev/null
+++ b/src/main/resources/wellSensor/krb5.conf
@@ -0,0 +1,48 @@
+[kdcdefaults]
+kdc_ports = 192.168.65.19:21732
+kdc_tcp_ports = ""
+
+[libdefaults]
+default_realm = HADOOP.COM
+kdc_timeout = 2500
+clockskew = 300
+use_dns_lookup = 0
+udp_preference_limit = 1465
+max_retries = 5
+dns_lookup_kdc = false
+dns_lookup_realm = false
+renewable = false
+forwardable = false
+renew_lifetime = 0m
+max_renewable_life = 30m
+allow_extend_version = false
+default_ccache_name = FILE:/tmp//krb5cc_%{uid}
+
+[realms]
+HADOOP.COM = {
+kdc = 192.168.65.19:21732
+kdc = 192.168.65.18:21732
+admin_server = 192.168.65.19:21730
+admin_server = 192.168.65.18:21730
+kpasswd_server = 192.168.65.19:21731
+kpasswd_server = 192.168.65.18:21731
+kpasswd_port = 21731
+kadmind_port = 21730
+kadmind_listen = 192.168.65.19:21730
+kpasswd_listen = 192.168.65.19:21731
+renewable = false
+forwardable = false
+renew_lifetime = 0m
+max_renewable_life = 30m
+acl_file = /opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/kadm5.acl
+dict_file = /opt/huawei/Bigdata/common/runtime/security/weakPasswdDic/weakPasswdForKdc.ini
+key_stash_file = /opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/.k5.HADOOP.COM
+}
+
+[domain_realm]
+.hadoop.com = HADOOP.COM
+
+[logging]
+kdc = SYSLOG:INFO:DAEMON
+admin_server = SYSLOG:INFO:DAEMON
+default = SYSLOG:NOTICE:DAEMON
diff --git a/src/main/resources/wellSensor/log4j.properties b/src/main/resources/wellSensor/log4j.properties
new file mode 100644
index 0000000..10e767d
--- /dev/null
+++ b/src/main/resources/wellSensor/log4j.properties
@@ -0,0 +1,37 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+kafka.logs.dir=logs
+
+log4j.rootLogger=INFO, stdout
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
+
+log4j.logger.kafka=ERROR, kafkaAppender
+
+log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH
+log4j.appender.kafkaAppender.File=${kafka.logs.dir}/client.log
+log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
+log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
+
+# Turn on all our debugging info
+#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender
+#log4j.logger.kafka.client.ClientUtils=DEBUG, kafkaAppender
+#log4j.logger.kafka.perf=DEBUG, kafkaAppender
+#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender
+#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG
diff --git a/src/main/resources/wellSensor/producer.properties b/src/main/resources/wellSensor/producer.properties
new file mode 100644
index 0000000..5e6446a
--- /dev/null
+++ b/src/main/resources/wellSensor/producer.properties
@@ -0,0 +1,5 @@
+security.protocol = SASL_PLAINTEXT
+kerberos.domain.name = hadoop.hadoop.com
+acks = 1
+bootstrap.servers = 192.168.65.16:21007,192.168.65.15:21007,192.168.65.14:21007
+sasl.kerberos.service.name = kafka
diff --git a/src/main/resources/wellSensor/user.keytab b/src/main/resources/wellSensor/user.keytab
new file mode 100644
index 0000000..a10b711
--- /dev/null
+++ b/src/main/resources/wellSensor/user.keytab
Binary files differ
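
Note on the Kafka pieces introduced above: the commit calls org.well.well.kafka.Producer.send(content, dataTopic), but that class itself is not part of this diff. As a rough, non-authoritative sketch only, the snippet below shows one way the new producer.properties, 77042.jaas.conf and krb5.conf could be wired to a plain kafka-clients producer that publishes to the dataTopic configured in application.properties (TEMPSTORE_8204). The class name KafkaSendSketch, the relative file paths and the sample payload are illustrative assumptions, not code from this repository.

// Hypothetical illustration only -- not part of the commit above.
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class KafkaSendSketch {

    public static void main(String[] args) throws IOException {
        // Point the JVM at the JAAS and Kerberos configuration files added in this commit
        // (relative paths are assumed here; a deployment would normally use absolute paths).
        System.setProperty("java.security.auth.login.config",
                "src/main/resources/wellSensor/77042.jaas.conf");
        System.setProperty("java.security.krb5.conf",
                "src/main/resources/wellSensor/krb5.conf");

        // Load the SASL_PLAINTEXT / bootstrap.servers settings from the new producer.properties.
        Properties props = new Properties();
        try (FileInputStream in =
                     new FileInputStream("src/main/resources/wellSensor/producer.properties")) {
            props.load(in);
        }
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // dataTopic as configured in application.properties.
        String dataTopic = "TEMPSTORE_8204";
        String content = "{\"devCode\":\"...\",\"mBody\":{}}"; // stand-in payload for illustration

        // Rough equivalent of the Producer.send(content, dataTopic) call in WaterQualityResponse.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            producer.send(new ProducerRecord<>(dataTopic, content));
            producer.flush();
        }
    }
}

In this kind of FusionInsight-style setup it is the security.protocol=SASL_PLAINTEXT setting together with the Krb5LoginModule/keytab entries from 77042.jaas.conf that authenticate the client; kerberos.domain.name in producer.properties appears to be a vendor-specific setting and is simply passed through with the rest of the loaded properties.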