diff --git a/src/main/java/com/xydl/config/DruidConfig.java b/src/main/java/com/xydl/config/DruidConfig.java deleted file mode 100644 index f7e14f6..0000000 --- a/src/main/java/com/xydl/config/DruidConfig.java +++ /dev/null @@ -1,15 +0,0 @@ -package com.xydl.config; - -import com.alibaba.druid.pool.DruidDataSource; -import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class DruidConfig { - @ConfigurationProperties(prefix = "spring.datasource") - @Bean - public DruidDataSource getDurid(){ - return new DruidDataSource(); - } -} diff --git a/src/main/java/com/xydl/config/MyBatisConfig.java b/src/main/java/com/xydl/config/MyBatisConfig.java deleted file mode 100644 index 09be1fb..0000000 --- a/src/main/java/com/xydl/config/MyBatisConfig.java +++ /dev/null @@ -1,20 +0,0 @@ -package com.xydl.config; - - -import com.baomidou.mybatisplus.autoconfigure.ConfigurationCustomizer; -import com.baomidou.mybatisplus.core.MybatisConfiguration; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class MyBatisConfig { - - public ConfigurationCustomizer configurationCustomizer(){ - return new ConfigurationCustomizer() { - @Override - public void customize(MybatisConfiguration configuration) { - //下划线与驼峰命名进行自动映射 - configuration.setMapUnderscoreToCamelCase(true); - } - }; - } -} diff --git a/src/main/java/com/xydl/controller/JDBCController.java b/src/main/java/com/xydl/controller/JDBCController.java deleted file mode 100644 index 6e4573f..0000000 --- a/src/main/java/com/xydl/controller/JDBCController.java +++ /dev/null @@ -1,44 +0,0 @@ -package com.xydl.controller; - -import com.xydl.util.DataSourceUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RestController; - -import javax.sql.DataSource; -import java.sql.*; - - -@RestController -public class JDBCController { - private static final Logger logger = LoggerFactory.getLogger(DataSourceUtils.class); - - - @RequestMapping("/data") - public void getData() { - Connection conn = null; - PreparedStatement pstmt = null; - try { - conn = DataSourceUtils.getConnection(); - String sql = "insert into pop_id(node_id,id) values(?,?)"; - pstmt = conn.prepareStatement(sql); - pstmt.setString(1, ""); - pstmt.setInt(2, 100); - pstmt.executeUpdate(); - } catch (Exception e) { - logger.error("execute sql exception:", e); - } finally { - DataSourceUtils.closeResource(pstmt, conn); - } - } - - - - - -} - - - - diff --git a/src/main/java/com/xydl/controller/TestController.java b/src/main/java/com/xydl/controller/TestController.java index 2114835..d2b56f7 100644 --- a/src/main/java/com/xydl/controller/TestController.java +++ b/src/main/java/com/xydl/controller/TestController.java @@ -1,26 +1,18 @@ package com.xydl.controller; - -import com.xydl.model.Eia; -import com.xydl.service.EiaService; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.ResponseBody; -import java.util.List; @Controller public class TestController { - @Autowired - private EiaService eiaService; - @RequestMapping("/eia") @ResponseBody - public List eia() { - System.out.println("get eia"); - return eiaService.getEia(); + 
public String test() { + System.out.println("==="); + return "测试成功"; } } \ No newline at end of file diff --git a/src/main/java/com/xydl/mapper/EaifMapper.java b/src/main/java/com/xydl/mapper/EaifMapper.java deleted file mode 100644 index ad6b59f..0000000 --- a/src/main/java/com/xydl/mapper/EaifMapper.java +++ /dev/null @@ -1,21 +0,0 @@ -package com.xydl.mapper; - - -import com.xydl.model.Eaif; -import com.xydl.model.Epa; -import org.apache.ibatis.annotations.Mapper; -import org.springframework.stereotype.Repository; - -import java.util.List; - -@Mapper -@Repository -public interface EaifMapper { - - - //红外测温 - List getEaif(); - - - -} diff --git a/src/main/java/com/xydl/mapper/EiaMapper.java b/src/main/java/com/xydl/mapper/EiaMapper.java deleted file mode 100644 index a51bfda..0000000 --- a/src/main/java/com/xydl/mapper/EiaMapper.java +++ /dev/null @@ -1,21 +0,0 @@ -package com.xydl.mapper; - -import com.xydl.model.Eia; -import org.apache.ibatis.annotations.Mapper; -import org.springframework.stereotype.Repository; - -import java.util.List; - -@Mapper -@Repository -public interface EiaMapper { - - - //铁芯、夹件、电容性设备 - List getEia(); - - - - - -} diff --git a/src/main/java/com/xydl/mapper/EpaMapper.java b/src/main/java/com/xydl/mapper/EpaMapper.java deleted file mode 100644 index ad27a16..0000000 --- a/src/main/java/com/xydl/mapper/EpaMapper.java +++ /dev/null @@ -1,34 +0,0 @@ -package com.xydl.mapper; - -import com.xydl.model.Epa; -import com.xydl.model.SuperModel; -import org.apache.ibatis.annotations.Mapper; -import org.springframework.stereotype.Repository; - -import java.util.List; - -@Mapper -@Repository -public interface EpaMapper { - - - //油色谱 - List getEpa(int devid); - - List getData(String sql); - - List getEqmidsByTableName(String tableName); - - List getTableNamesBySyncTable(String syncTable); - - String getSqlBySyncTable(String syncTable, String tableName); - - - - - - - - - -} diff --git a/src/main/java/com/xydl/mapper/EtpMapper.java b/src/main/java/com/xydl/mapper/EtpMapper.java deleted file mode 100644 index 62909c7..0000000 --- a/src/main/java/com/xydl/mapper/EtpMapper.java +++ /dev/null @@ -1,21 +0,0 @@ -package com.xydl.mapper; - -import com.xydl.model.Etp; -import org.apache.ibatis.annotations.Mapper; -import org.springframework.stereotype.Repository; - -import java.util.List; - -@Mapper -@Repository -public interface EtpMapper { - - - //绕组测温 - List getEtp(); - - - - - -} diff --git a/src/main/java/com/xydl/mapper/MicMapper.java b/src/main/java/com/xydl/mapper/MicMapper.java deleted file mode 100644 index 232c068..0000000 --- a/src/main/java/com/xydl/mapper/MicMapper.java +++ /dev/null @@ -1,24 +0,0 @@ -package com.xydl.mapper; - - -import com.xydl.model.Epa; -import com.xydl.model.Microclimate; -import org.apache.ibatis.annotations.Mapper; -import org.springframework.stereotype.Repository; - -import java.util.List; - -@Mapper -@Repository -public interface MicMapper { - - - - //微气象 - List getMicroclimate(); - - - - - -} diff --git a/src/main/java/com/xydl/mapper/MoaMapper.java b/src/main/java/com/xydl/mapper/MoaMapper.java deleted file mode 100644 index 06e3bbe..0000000 --- a/src/main/java/com/xydl/mapper/MoaMapper.java +++ /dev/null @@ -1,24 +0,0 @@ -package com.xydl.mapper; - - -import com.xydl.model.Epa; -import com.xydl.model.Moa; -import org.apache.ibatis.annotations.Mapper; -import org.springframework.stereotype.Repository; - -import java.util.List; - -@Mapper -@Repository -public interface MoaMapper { - - - - //绝缘监测-金属氧化物 - List getMoa(); - - - - - -} diff --git 
a/src/main/java/com/xydl/mapper/OperationDB.java b/src/main/java/com/xydl/mapper/OperationDB.java new file mode 100644 index 0000000..f46fbda --- /dev/null +++ b/src/main/java/com/xydl/mapper/OperationDB.java @@ -0,0 +1,12 @@ +package com.xydl.mapper; + +import java.util.List; + +public interface OperationDB { + + List getAllDevId(String tableName); + + String getLastTime(String tableName, String devId); + + +} diff --git a/src/main/java/com/xydl/mapper/PdMapper.java b/src/main/java/com/xydl/mapper/PdMapper.java deleted file mode 100644 index fccc8f0..0000000 --- a/src/main/java/com/xydl/mapper/PdMapper.java +++ /dev/null @@ -1,25 +0,0 @@ -package com.xydl.mapper; - - -import com.xydl.model.Epa; -import com.xydl.model.Pd; -import org.apache.ibatis.annotations.Mapper; -import org.springframework.stereotype.Repository; - -import java.util.List; - -@Mapper -@Repository -public interface PdMapper { - - - - //局放监测 - List getPd(); - - - - - - -} diff --git a/src/main/java/com/xydl/mapper/RptTemperMapper.java b/src/main/java/com/xydl/mapper/RptTemperMapper.java deleted file mode 100644 index 148ecc3..0000000 --- a/src/main/java/com/xydl/mapper/RptTemperMapper.java +++ /dev/null @@ -1,24 +0,0 @@ -package com.xydl.mapper; - -import com.xydl.model.Eia; -import com.xydl.model.Epa; -import com.xydl.model.Etp; -import com.xydl.model.RptTemper; -import org.apache.ibatis.annotations.Mapper; -import org.springframework.stereotype.Repository; - -import java.util.List; - -@Mapper -@Repository -public interface RptTemperMapper { - - - - //测温点 - List getRptTemper(); - - - - -} diff --git a/src/main/java/com/xydl/mapper/ScurMapper.java b/src/main/java/com/xydl/mapper/ScurMapper.java deleted file mode 100644 index c437371..0000000 --- a/src/main/java/com/xydl/mapper/ScurMapper.java +++ /dev/null @@ -1,25 +0,0 @@ -package com.xydl.mapper; - - -import com.xydl.model.Epa; -import com.xydl.model.Scur; -import org.apache.ibatis.annotations.Mapper; -import org.springframework.stereotype.Repository; - -import java.util.List; - -@Mapper -@Repository -public interface ScurMapper { - - - - //电缆环流 - List getScur(); - - - - - - -} diff --git a/src/main/java/com/xydl/mapper/Sf6Mapper.java b/src/main/java/com/xydl/mapper/Sf6Mapper.java deleted file mode 100644 index 53c497f..0000000 --- a/src/main/java/com/xydl/mapper/Sf6Mapper.java +++ /dev/null @@ -1,22 +0,0 @@ -package com.xydl.mapper; - -import com.xydl.model.Epa; -import com.xydl.model.Sf6; -import org.apache.ibatis.annotations.Mapper; -import org.springframework.stereotype.Repository; - -import java.util.List; - -@Mapper -@Repository -public interface Sf6Mapper { - - - //SF6监测 - List getSf6(); - - - - - -} diff --git a/src/main/java/com/xydl/mapper/Sf6envMapper.java b/src/main/java/com/xydl/mapper/Sf6envMapper.java deleted file mode 100644 index 22fb0f0..0000000 --- a/src/main/java/com/xydl/mapper/Sf6envMapper.java +++ /dev/null @@ -1,22 +0,0 @@ -package com.xydl.mapper; - - -import com.xydl.model.Epa; -import com.xydl.model.Sf6env; -import org.apache.ibatis.annotations.Mapper; -import org.springframework.stereotype.Repository; - -import java.util.List; - -@Mapper -@Repository -public interface Sf6envMapper { - - - //SF6环境 - List getSf6env(); - - - - -} diff --git a/src/main/java/com/xydl/model/Eaif.java b/src/main/java/com/xydl/model/Eaif.java deleted file mode 100644 index 2905b27..0000000 --- a/src/main/java/com/xydl/model/Eaif.java +++ /dev/null @@ -1,30 +0,0 @@ -package com.xydl.model; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; - 
-import java.sql.Timestamp; -import java.util.Date; - -@Data -public class Eaif { - - @JsonProperty("SubDeviceID") - private String subDeviceID; - - @JsonProperty("SensorCode") - private String sensorId; - - @JsonProperty("AcquisitionTime") - private String captureTime; - - @JsonProperty("MaxTmp") - private double maxTemp; - - @JsonProperty("MinTmp") - private double minTemp; - - @JsonProperty("AvgTmp") - private double avgTemp; - -} diff --git a/src/main/java/com/xydl/model/Eia.java b/src/main/java/com/xydl/model/Eia.java deleted file mode 100644 index ba7ab65..0000000 --- a/src/main/java/com/xydl/model/Eia.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.xydl.model; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; - -import java.util.Date; - -@Data -public class Eia { - - @JsonProperty("SubDeviceID") - private String subDeviceID; - - @JsonProperty("SensorCode") - private String sensorId; - - @JsonProperty("AcquisitionTime") - private String captureTime; - - @JsonProperty("MaxTmp") - private String maxTemp; - - @JsonProperty("MinTmp") - private String minTemp; - - @JsonProperty("AvgTmp") - private String avgTemp; - - @JsonProperty("Phase") - private String phase; - - -} diff --git a/src/main/java/com/xydl/model/Epa.java b/src/main/java/com/xydl/model/Epa.java deleted file mode 100644 index d28fb7b..0000000 --- a/src/main/java/com/xydl/model/Epa.java +++ /dev/null @@ -1,65 +0,0 @@ -package com.xydl.model; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; -import net.sf.jsqlparser.expression.DateTimeLiteralExpression; - -import java.sql.Timestamp; -import java.text.SimpleDateFormat; -import java.util.Date; - -/* -油色谱 - */ -@Data -public class Epa { - - @JsonProperty("SubDeviceID") - private String subDeviceID; - - @JsonProperty("SensorCode") - private String sensorId; - - @JsonProperty("AcquisitionTime") - private String dTime ; - - @JsonProperty("H2") - private double h2ppm; - - @JsonProperty("CH4") - private double ch4ppm; - - @JsonProperty("C2H6") - private double c2h6ppm; - - @JsonProperty("C2H4") - private double c2h4ppm; - - @JsonProperty("C2H2") - private double c2h2ppm; - - @JsonProperty("CO") - private double coppm; - - @JsonProperty("CO2") - private double co2ppm; - - @JsonProperty("O2") - private double o2ppm; - - @JsonProperty("N2") - private double n2ppm; - - @JsonProperty("TotalHydrocarbon") - private double totalHydroCarbon; - - @JsonProperty("GasPress") - private double gaspress; - - @JsonProperty("H2O") - private double h2oppm; - - @JsonProperty("Phase") - private String phase; - -} diff --git a/src/main/java/com/xydl/model/Etp.java b/src/main/java/com/xydl/model/Etp.java deleted file mode 100644 index bc51eaa..0000000 --- a/src/main/java/com/xydl/model/Etp.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.xydl.model; - -import com.baomidou.mybatisplus.annotation.TableField; -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; - -import java.sql.Timestamp; -import java.util.Date; - -@Data -public class Etp { - - @JsonProperty("SubDeviceID") - private String subDeviceID; - - @JsonProperty("SensorCode") - private String sensorId; - - @JsonProperty("AcquisitionTime") - private String dTime; - - @JsonProperty("MaxTmp") - private double t1; - - - - - - - - - -} diff --git a/src/main/java/com/xydl/model/Microclimate.java b/src/main/java/com/xydl/model/Microclimate.java deleted file mode 100644 index 33762d3..0000000 --- a/src/main/java/com/xydl/model/Microclimate.java +++ /dev/null @@ -1,42 +0,0 @@ 
-package com.xydl.model; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; - -import java.sql.Timestamp; -import java.util.Date; - -@Data -public class Microclimate { - @JsonProperty("SubDeviceID") - private String subDeviceID; - - @JsonProperty("SensorCode") - private String sensorId; - - @JsonProperty("AcquisitionTime") - private String dTime; - - @JsonProperty("AirTemperature") - private double envTmp; - - @JsonProperty("AirPressure") - private double envPres; - - @JsonProperty("Humidity") - private double envHum; - - @JsonProperty("Precipitation") - private double rnfll; - - @JsonProperty("PrecipitationIntensity") - private double PreciInten = 0; - - @JsonProperty("RadiationIntensity") - private double radiInten = 0; - - - - - -} diff --git a/src/main/java/com/xydl/model/Moa.java b/src/main/java/com/xydl/model/Moa.java deleted file mode 100644 index 65a939d..0000000 --- a/src/main/java/com/xydl/model/Moa.java +++ /dev/null @@ -1,36 +0,0 @@ -package com.xydl.model; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; - -import java.sql.Timestamp; -import java.util.Date; - -@Data -public class Moa { - - @JsonProperty("SubDeviceID") - private String subDeviceID; - - @JsonProperty("SensorCode") - private String sensorId; - - @JsonProperty("AcquisitionTime") - private String captureTime; - - @JsonProperty("SystemVoltage") - private double pt1; - - @JsonProperty("TotalCurrent") - private double lc1; - - @JsonProperty("ResistiveCurrent") - private double rc1; - - @JsonProperty("ActionCount") - private double ligcnt1; - - @JsonProperty("LastActionTime") - private String lastligtm1; - -} diff --git a/src/main/java/com/xydl/model/Pd.java b/src/main/java/com/xydl/model/Pd.java deleted file mode 100644 index f5b9450..0000000 --- a/src/main/java/com/xydl/model/Pd.java +++ /dev/null @@ -1,30 +0,0 @@ -package com.xydl.model; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; - -import java.sql.Timestamp; -import java.util.Date; - -@Data -public class Pd { - - @JsonProperty("SubDeviceID") - private String subDeviceID; - - @JsonProperty("SensorCode") - private String sensorId; - - @JsonProperty("AcquisitionTime") - private String dTime; - - @JsonProperty("DischargeCapacity") - private double waveForm; - - @JsonProperty("DischargePosition") - private double apppadsch; - - @JsonProperty("PulseCount") - private double plsNum; - -} diff --git a/src/main/java/com/xydl/model/RptTemper.java b/src/main/java/com/xydl/model/RptTemper.java deleted file mode 100644 index aefe16f..0000000 --- a/src/main/java/com/xydl/model/RptTemper.java +++ /dev/null @@ -1,42 +0,0 @@ -package com.xydl.model; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; -import java.sql.Timestamp; -import java.util.Date; - -@Data -public class RptTemper { - - @JsonProperty("SubDeviceID") - private String subDeviceID; - - @JsonProperty("SensorCode") - private String sensorId; - - @JsonProperty("AcquisitionTime") - private String createTime; - - @JsonProperty("OlTmpA") - private double aoTemper; - - @JsonProperty("OlTmpB") - private double boTemper; - - @JsonProperty("OlTmpC") - private double coTemper; - - @JsonProperty("IlTmpA") - private double aiTemper; - - @JsonProperty("IlTmpB") - private double biTemper; - - @JsonProperty("IlTmpC") - private double ciTemper; - - @JsonProperty("OntologyTmp") - private double boxTemper; - - -} diff --git a/src/main/java/com/xydl/model/Scur.java b/src/main/java/com/xydl/model/Scur.java deleted file 
mode 100644 index bf38181..0000000 --- a/src/main/java/com/xydl/model/Scur.java +++ /dev/null @@ -1,22 +0,0 @@ -package com.xydl.model; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; -import java.sql.Timestamp; -import java.util.Date; - -@Data -public class Scur { - - @JsonProperty("SubDeviceID") - private String subDeviceID; - - @JsonProperty("SensorCode") - private String sensorId; - - @JsonProperty("AcquisitionTime") - private String dTime; - - @JsonProperty("CoreCurrent") - private double currentVal; -} diff --git a/src/main/java/com/xydl/model/Sf6.java b/src/main/java/com/xydl/model/Sf6.java deleted file mode 100644 index ffda062..0000000 --- a/src/main/java/com/xydl/model/Sf6.java +++ /dev/null @@ -1,35 +0,0 @@ -package com.xydl.model; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; -import java.sql.Timestamp; -import java.util.Date; - -@Data -public class Sf6 { - - @JsonProperty("SubDeviceID") - private String subDeviceID; - - @JsonProperty("SensorCode") - private String sensorId; - - @JsonProperty("AcquisitionTime") - private String dTime; - - @JsonProperty("Temperature") - private double temp1; - - @JsonProperty("Pressure20C") - private double pressure1; - - @JsonProperty("AbsolutePressure") - private double AbsolutePressure = pressure1 + 900; - - @JsonProperty("Density") - private double md1; - - @JsonProperty("Moisture") - private double pm1; - -} diff --git a/src/main/java/com/xydl/model/Sf6env.java b/src/main/java/com/xydl/model/Sf6env.java deleted file mode 100644 index d6ff49d..0000000 --- a/src/main/java/com/xydl/model/Sf6env.java +++ /dev/null @@ -1,43 +0,0 @@ -package com.xydl.model; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; -import java.sql.Timestamp; -import java.util.Date; - -@Data -public class Sf6env { - - @JsonProperty("SubDeviceID") - private String subDeviceID; - - @JsonProperty("SensorCode") - private String sensorId; - - @JsonProperty("AcquisitionTime") - private String dTime; - - @JsonProperty("gas1") - private double gas1; - - @JsonProperty("yq1") - private double yq1; - - @JsonProperty("md1") - private double md1; - - @JsonProperty("pm1") - private double pm1; - - @JsonProperty("gascnt1") - private double gascnt1; - - @JsonProperty("hmcnt1") - private double hmcnt1; - - @JsonProperty("sf6warn1") - private double sf6warn1; - - @JsonProperty("o2warn1") - private double o2warn1; -} diff --git a/src/main/java/com/xydl/model/SuperModel.java b/src/main/java/com/xydl/model/SuperModel.java deleted file mode 100644 index 5eac7c3..0000000 --- a/src/main/java/com/xydl/model/SuperModel.java +++ /dev/null @@ -1,231 +0,0 @@ -package com.xydl.model; - -import com.fasterxml.jackson.annotation.JsonProperty; - -public class SuperModel { - - /** - * EPA - */ - @JsonProperty("SubDeviceID") - private String subDeviceID; - - @JsonProperty("SensorCode") - private String sensorId; - - @JsonProperty("AcquisitionTime") - private String dTime ; - - @JsonProperty("H2") - private double h2ppm; - - @JsonProperty("CH4") - private double ch4ppm; - - @JsonProperty("C2H6") - private double c2h6ppm; - - @JsonProperty("C2H4") - private double c2h4ppm; - - @JsonProperty("C2H2") - private double c2h2ppm; - - @JsonProperty("CO") - private double coppm; - - @JsonProperty("CO2") - private double co2ppm; - - @JsonProperty("O2") - private double o2ppm; - - @JsonProperty("N2") - private double n2ppm; - - @JsonProperty("TotalHydrocarbon") - private double totalHydroCarbon; - - @JsonProperty("GasPress") - 
private double gaspress; - - @JsonProperty("H2O") - private double h2oppm; - - @JsonProperty("Phase") - private String phase; - - /** - * Eaif - */ - -// @JsonProperty("AcquisitionTime") -// private String captureTime; - - @JsonProperty("MaxTmp") - private double maxTemp; - - @JsonProperty("MinTmp") - private double minTemp; - - @JsonProperty("AvgTmp") - private double avgTemp; - - /** - * Eia - */ - - /** - * Etp - */ - - -// @JsonProperty("MaxTmp") -// private double t1; - - - /** - * Microclimate - */ - - - @JsonProperty("AirTemperature") - private double envTmp; - - @JsonProperty("AirPressure") - private double envPres; - - @JsonProperty("Humidity") - private double envHum; - - @JsonProperty("Precipitation") - private double rnfll; - - @JsonProperty("PrecipitationIntensity") - private double PreciInten = 0; - - @JsonProperty("RadiationIntensity") - private double radiInten = 0; - - /** - * Moa - */ - - - @JsonProperty("SystemVoltage") - private double pt1; - - @JsonProperty("TotalCurrent") - private double lc1; - - @JsonProperty("ResistiveCurrent") - private double rc1; - - @JsonProperty("ActionCount") - private double ligcnt1; - - @JsonProperty("LastActionTime") - private String lastligtm1; - - /** - * Pd - */ - - - @JsonProperty("DischargeCapacity") - private double waveForm; - - @JsonProperty("DischargePosition") - private double apppadsch; - - @JsonProperty("PulseCount") - private double plsNum; - - /** - * RptTemper - */ - - -// @JsonProperty("AcquisitionTime") -// private String createTime; - - @JsonProperty("OlTmpA") - private double aoTemper; - - @JsonProperty("OlTmpB") - private double boTemper; - - @JsonProperty("OlTmpC") - private double coTemper; - - @JsonProperty("IlTmpA") - private double aiTemper; - - @JsonProperty("IlTmpB") - private double biTemper; - - @JsonProperty("IlTmpC") - private double ciTemper; - - @JsonProperty("OntologyTmp") - private double boxTemper; - - /** - * Scur - */ - - - @JsonProperty("CoreCurrent") - private double currentVal; - - /** - * Sf6 - */ - - - @JsonProperty("Temperature") - private double temp1; - - @JsonProperty("Pressure20C") - private double pressure1; - - @JsonProperty("AbsolutePressure") - private double AbsolutePressure = pressure1 + 900; - - @JsonProperty("Density") - private double md1Sf6; - - @JsonProperty("Moisture") - private double pm1Sf6; - - - /** - * Sf6env - */ - - @JsonProperty("gas1") - private double gas1; - - @JsonProperty("yq1") - private double yq1; - - @JsonProperty("md1") - private double md1; - - @JsonProperty("pm1") - private double pm1; - - @JsonProperty("gascnt1") - private double gascnt1; - - @JsonProperty("hmcnt1") - private double hmcnt1; - - @JsonProperty("sf6warn1") - private double sf6warn1; - - @JsonProperty("o2warn1") - private double o2warn1; - - -} diff --git a/src/main/java/com/xydl/service/EiaService.java b/src/main/java/com/xydl/service/EiaService.java deleted file mode 100644 index efb8f7b..0000000 --- a/src/main/java/com/xydl/service/EiaService.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.xydl.service; - -import com.xydl.model.*; - -import java.util.List; -import java.util.Map; - -public interface EiaService { - - List getEaif(); - - List getEia(); - - void getEpa(); - - List getEtp(); - - List getMic(); - - List getMoa(); - - List getPd(); - - List getRptTemper(); - - List getScur(); - - List getSf6(); - - List getSf6env(); - - -} diff --git a/src/main/java/com/xydl/service/impl/EiaServiceImpl.java b/src/main/java/com/xydl/service/impl/EiaServiceImpl.java deleted file mode 100644 index 
6ae87fa..0000000 --- a/src/main/java/com/xydl/service/impl/EiaServiceImpl.java +++ /dev/null @@ -1,528 +0,0 @@ -package com.xydl.service.impl; - - -import com.xydl.mapper.*; -import com.xydl.model.*; -import com.xydl.service.EiaService; -import com.xydl.util.CommonUtils; -import com.xydl.util.DataSourceUtils; -import com.xydl.util.FormatUtil; -import com.xydl.util.MqttUtil; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.scheduling.annotation.Scheduled; -import org.springframework.stereotype.Service; - -import java.sql.*; -import java.text.SimpleDateFormat; -import java.util.*; -import java.util.Date; - - -@Service -public class EiaServiceImpl implements EiaService { - private static final Logger logger = LoggerFactory.getLogger(EiaServiceImpl.class); - - private static final String SYNC_TABLE = "sync_tables_info"; - - @Autowired - private EaifMapper eaifMapper; - - @Autowired - private EpaMapper epaMapper; - - @Autowired - private EiaMapper eiaMapper; - - @Autowired - private EtpMapper etpMapper; - - @Autowired - private MicMapper micMapper; - - @Autowired - private MoaMapper moaMapper; - - @Autowired - private PdMapper pdMapper; - - @Autowired - private RptTemperMapper rptTemperMapper; - - @Autowired - private ScurMapper scurMapper; - - @Autowired - private Sf6Mapper sf6Mapper; - - @Autowired - private Sf6envMapper sf6envMapper; - - - @Override - public List getEaif() { - return eaifMapper.getEaif(); -// - } - - @Override - public List getEia() { - return eiaMapper.getEia(); - } - - @Override - public void getEpa() { - List tableNames = epaMapper.getTableNamesBySyncTable(SYNC_TABLE); - -// for(String tableName : tableNames){ -// System.out.println(tableName); -// List eqmids = epaMapper.getEqmidsByTableName(tableName); -// for(int eqmid : eqmids){ -// List epas = epaMapper.getEpa(eqmid); -// String jsonStringEpa = FormatUtil.list2Json(epas); -// MqttUtil.publish2MQTT(jsonStringEpa); -// } -// } - for(String tableName : tableNames){ - String sql = epaMapper.getSqlBySyncTable(SYNC_TABLE,tableName); - List eqmids = epaMapper.getEqmidsByTableName(tableName); - if(sql.contains("?")){ - for(int eqmid : eqmids){ - String newSql = sql.replaceAll("\\?",eqmid+""); - List epas = epaMapper.getData(newSql); - String jsonStringEpa = FormatUtil.list2Json(epas); - MqttUtil.publish2MQTT(jsonStringEpa); - } - } - } - } - - @Override - public List getEtp() { - return etpMapper.getEtp(); - } - - @Override - public List getMic() { - return micMapper.getMicroclimate(); - } - - @Override - public List getMoa() { - return moaMapper.getMoa(); - } - - @Override - public List getPd() { - return pdMapper.getPd(); - } - - @Override - public List getRptTemper() { - return rptTemperMapper.getRptTemper(); - } - - @Override - public List getScur() { - return scurMapper.getScur(); - } - - @Override - public List getSf6() { - return sf6Mapper.getSf6(); - } - - @Override - public List getSf6env() { - return sf6envMapper.getSf6env(); - } - - public List getAllTableName() { - Connection conn = null; - PreparedStatement pstmt = null; - ResultSet rs = null; - List tableNames = new ArrayList<>(); - try { - conn = DataSourceUtils.getConnection(); - String sql = "select table_name from sync_tables_info"; - pstmt = conn.prepareStatement(sql); - rs = pstmt.executeQuery(); - - while(rs.next()){ - tableNames.add(rs.getString("table_name")); - } - } catch (SQLException e) { - logger.error("execute sql exception:", e); - } finally { 
- DataSourceUtils.closeResource(rs, pstmt, conn); - } - - return tableNames; - } - - public Map getFieldMap(String tableName) { - Connection conn = null; - PreparedStatement pstmt = null; - ResultSet rs = null; - Map fieldsMap = new HashMap<>(); - try { - conn = DataSourceUtils.getConnection(); - String sql = "select sync_fields_info.field_name, sync_fields_info.dest_field_name " + - "from sync_fields_info,sync_tables_info " + - "where sync_fields_info.client_id = 10 and sync_fields_info.table_name = sync_tables_info.table_name and sync_tables_info.table_name=?"; - pstmt = conn.prepareStatement(sql); - pstmt.setString(1, tableName); - rs = pstmt.executeQuery(); - ResultSetMetaData metaData = rs.getMetaData(); - - while(rs.next()){ - fieldsMap.put(rs.getString("field_name"),rs.getString("dest_field_name")); - } - } catch (SQLException e) { - logger.error("execute sql exception:", e); - } finally { - DataSourceUtils.closeResource(rs, pstmt, conn); - } - - return fieldsMap; - } - -// public String getLastRecordTimeSended(String tableName,String deviceId) { -// Connection conn = null; -// PreparedStatement pstmt = null; -// ResultSet rs = null; -// Timestamp timeStamp = null; -// try { -// conn = DataSourceUtils.getConnection(); -// String sql = "select field_val2 from sync_records where table_name =? and devid_val=?"; -// pstmt = conn.prepareStatement(sql); -// pstmt.setString(1, tableName); -// pstmt.setString(2, deviceId); -// rs = pstmt.executeQuery(); -// if(rs.next()){ -// timeStamp = rs.getTimestamp("field_val2"); -// -// } -// } catch (SQLException e) { -// logger.error("execute sql exception:", e); -// } finally { -// DataSourceUtils.closeResource(rs, pstmt, conn); -// } -// -// return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(timeStamp); -// } - -// public List getDeviceID(String tableName) { -// Connection conn = null; -// PreparedStatement pstmt = null; -// ResultSet rs = null; -// String sqlExecuting = null ; -// List deviceIDs = new ArrayList<>(); -// try { -// conn = DataSourceUtils.getConnection(); -// String sql = "select distinct devid_val from sync_records where table_name =?"; -// pstmt = conn.prepareStatement(sql); -// pstmt.setString(1, tableName); -// rs = pstmt.executeQuery(); -// while(rs.next()){ -// deviceIDs.add(rs.getString("devid_val")); -// } -// } catch (SQLException e) { -// logger.error("execute sql exception:", e); -// } finally { -// DataSourceUtils.closeResource(rs, pstmt, conn); -// } -// -// return deviceIDs; -// } - - public Map getDeviceID1(String tableName) { - Connection conn = null; - PreparedStatement pstmt = null; - ResultSet rs = null; - String sqlExecuting = null ; - Map devIDTimeMap = new HashMap<>(); - try { - conn = DataSourceUtils.getConnection(); - String sql = "select devid_val,field_val2 from sync_records where table_name =?"; - pstmt = conn.prepareStatement(sql); - pstmt.setString(1, tableName); - rs = pstmt.executeQuery(); - while(rs.next()){ - devIDTimeMap.put(rs.getString("devid_val"),rs.getString("field_val2")); - } - } catch (SQLException e) { - logger.error("execute sql exception:", e); - } finally { - DataSourceUtils.closeResource(rs, pstmt, conn); - } - - return devIDTimeMap; - } - - public String getSQL(String tableName) { - Connection conn = null; - PreparedStatement pstmt = null; - ResultSet rs = null; - String sqlExecuting = null ; - try { - conn = DataSourceUtils.getConnection(); - String sql = "select * from sync_tables_info where table_name =?"; - pstmt = conn.prepareStatement(sql); - pstmt.setString(1, tableName); 
- rs = pstmt.executeQuery(); - if(rs.next()){ - sqlExecuting = rs.getString("sql"); - } - } catch (SQLException e) { - logger.error("execute sql exception:", e); - } finally { - DataSourceUtils.closeResource(rs, pstmt, conn); - } - - return sqlExecuting; - } - - public List> getData(String sqlExecuting, String deviceId, String timeStamp) { - Connection conn = null; - PreparedStatement pstmt = null; - ResultSet rs = null; - List> records = new ArrayList<>(); - try { - conn = DataSourceUtils.getConnection(); - String sql = sqlExecuting; - pstmt = conn.prepareStatement(sql); - pstmt.setString(1, deviceId); - pstmt.setString(2,timeStamp); - rs = pstmt.executeQuery(); - int columnCount = rs.getMetaData().getColumnCount(); //获取列的数量 - while(rs.next()){ - Map record = new HashMap<>(); - for (int col = 0; col < columnCount; col++) { - String columnName = rs.getMetaData().getColumnName(col + 1); - String columnValue = rs.getString(columnName); - record.put(columnName,columnValue); - } - records.add(record); - } - } catch (SQLException e) { - logger.error("execute sql exception:", e); - } finally { - DataSourceUtils.closeResource(rs, pstmt, conn); - } - - return records; - } - - -// @Scheduled(initialDelay=1000, fixedRate = 1000 * 3600) //通过@Scheduled声明该方法是计划任务,使用fixedRate属性每隔固定时间执行 -// public void reportRecord(){ -// List allTableNames = getAllTableName(); -// Map> tableFieldMap = new HashMap<>(); -// Map>>>> allTableData = new HashMap<>(); -// for(String tablenName : allTableNames){ -// //if用来测试 -// if(!"data_eaif_h".equals(tablenName)){ -// Map fieldMap = getFieldMap(tablenName); -// tableFieldMap.put(tablenName,fieldMap); -// -// String sqlExecuting = getSQL(tablenName); -// List deviceIDs = getDeviceID(tablenName); -// List>>> dataOfoneTable = new ArrayList<>(); -// for(String deviceID : deviceIDs){ -// Map>> deviceIDMap = new HashMap<>(); -// String timeStamp = getLastRecordTimeSended(tablenName,deviceID); -// List> dataOfoneDeviceID = getData(sqlExecuting,deviceID,timeStamp); -// -// deviceIDMap.put(deviceID,dataOfoneDeviceID); -// dataOfoneTable.add(deviceIDMap); -// } -// allTableData.put(tablenName,dataOfoneTable); -// } -// -// } -// System.out.println("旧数据: "+allTableData); -// System.out.println("==============================="); -// -// -// Map>>>> newAllData = new HashMap<>(); -// for(Map.Entry>>>> dataEntry : allTableData.entrySet()){ -// List>>> newRecords = transformFields(dataEntry.getKey(),tableFieldMap,dataEntry.getValue()); -// newAllData.put(dataEntry.getKey(),newRecords); -// } -// -// for(String tableName : newAllData.keySet()){ -// List>>> records = newAllData.get(tableName); -// String jsonStringData = FormatUtil.list2Json(records); -// if(MqttUtil.publish2MQTT(jsonStringData)){ -// updateLastRecordTimeSended(tableName,records); -// }else{ -// System.out.println("消息推送失败"); -// } -// } -// -// } - - @Scheduled(initialDelay=1000, fixedRate = 1000 * 3600) //通过@Scheduled声明该方法是计划任务,使用fixedRate属性每隔固定时间执行 - public void reportRecord1(){ - List allTableNames = getAllTableName(); - Map> tableFieldMap = new HashMap<>(); - Map>>>> allTableData = new HashMap<>(); - for(String tableName : allTableNames){ - //if用来测试 - if(!"data_eaif_h".equals(tableName)){ - Map fieldMap = getFieldMap(tableName); - String sqlExecuting = getSQL(tableName); - Map devIDLastTimeMap = getDeviceID1(tableName); - for(String deviceID : devIDLastTimeMap.keySet()){ - List> dataOfoneDeviceID = getData(sqlExecuting,deviceID, (String) devIDLastTimeMap.get(deviceID)); - - List> newDataOfoneDeviceID = 
transformList(fieldMap,dataOfoneDeviceID); - - String jsonStringData = FormatUtil.list2Json(newDataOfoneDeviceID); - if(MqttUtil.publish2MQTT(jsonStringData)){ - updateSyncRecordsTable(tableName,deviceID, (String) devIDLastTimeMap.get(deviceID)); - }else{ - System.out.println("消息推送失败"); - } - } - } - } -// System.out.println("旧数据: "+allTableData); -// System.out.println("==============================="); - - -// Map>>>> newAllData = new HashMap<>(); -// for(Map.Entry>>>> dataEntry : allTableData.entrySet()){ -// List>>> newRecords = transformFields(dataEntry.getKey(),tableFieldMap,dataEntry.getValue()); -// newAllData.put(dataEntry.getKey(),newRecords); -// } - -// for(String tableName : newAllData.keySet()){ -// List>>> records = newAllData.get(tableName); -// String jsonStringData = FormatUtil.list2Json(records); -// if(MqttUtil.publish2MQTT(jsonStringData)){ -// updateLastRecordTimeSended(tableName,records); -// }else{ -// System.out.println("消息推送失败"); -// } -// } - - - } - -// //返回替换字段名的records -// private List>>> transformFields(String recordTableName, Map> tableFieldMap, List>>> records) { -// List>>> newRecords = new ArrayList<>(); -// if(tableFieldMap.containsKey(recordTableName)){ -// for(Map>> record : records ){ -// newRecords.add(transformMore(tableFieldMap.get(recordTableName),record)); -// } -// } -// return newRecords; -// } -// -// private Map>> transformMore(Map fieldMap, Map>> deviceIDDataMap) { -// Map>> newDeviceIDData = new HashMap<>(); -// for(Map.Entry>> entry : deviceIDDataMap.entrySet()){ -// newDeviceIDData.put(entry.getKey(),transformMoreAgain(fieldMap, entry.getValue())); -// } -// return newDeviceIDData; -// } -// -// private List> transformMoreAgain(Map fieldMap, List> deviceIDData) { -// List> newDeviceIDData = new ArrayList<>(); -// for(Map fieldValueMap : deviceIDData){ -// newDeviceIDData.add(transformMoreAgain2(fieldMap,fieldValueMap)); -// } -// return newDeviceIDData; -// } -// -// private Map transformMoreAgain2(Map fieldMap, Map fieldValueMap) { -// Map newFieldValueMap = new HashMap<>(); -// for(String field : fieldMap.keySet()){ -// for(String columnName : fieldValueMap.keySet() ){ -// if(Objects.equals(field,columnName)){ -// newFieldValueMap.put(fieldMap.get(field),fieldValueMap.get(columnName) ); -// } -// } -// } -// return newFieldValueMap; -// } - - private List> transformList(Map fieldMap, List> deviceIDData) { - List> newDeviceIDData = new ArrayList<>(); - for(Map fieldValueMap : deviceIDData){ - newDeviceIDData.add(transformOneRecord(fieldMap,fieldValueMap)); - } - return newDeviceIDData; - } - - private Map transformOneRecord(Map fieldMap, Map fieldValueMap) { - Map newFieldValueMap = new HashMap<>(); - for(String field : fieldMap.keySet()){ - if(fieldValueMap.containsKey(field)){ - newFieldValueMap.put(fieldMap.get(field),fieldValueMap.get(field) ); - } - } - return newFieldValueMap; - } - -// -// public void updateLastRecordTimeSended(String tableName, List>>> records) { -// List deviceIDs = getDeviceID(tableName); -// for(String deviceID : deviceIDs){ -// String lastRecordTimesJustSended = null; -// for(Map>> recordMap : records){ -// if(recordMap.get(deviceID) != null){ -// List> deviceIDData = recordMap.get(deviceID); -// lastRecordTimesJustSended = (String) deviceIDData.get(deviceIDData.size()-1).get("AcquisitionTime"); -// System.out.println(tableName+"表"+deviceID+"最后一条记录时间: "+lastRecordTimesJustSended); -// } -// -// } -//// updateSyncRecordsTable(tableName, deviceID, lastRecordTimesJustSended); -// -// } -// -// } - - - public 
boolean updateSyncRecordsTable(String tableName, String deviceID, String time) { - Connection conn = null; - PreparedStatement pstmt = null; - ResultSet rs = null; - try { - conn = DataSourceUtils.getConnection(); - String sql = "update sync_records set field_val2 = ? where table_name = ? and devid_val = ?"; - pstmt = conn.prepareStatement(sql); - pstmt.setString(1, time); - pstmt.setString(2, tableName); - pstmt.setString(3, deviceID); - pstmt.executeUpdate(); - - } catch (SQLException e) { - logger.error("execute sql exception:", e); - return false; - } finally { - DataSourceUtils.closeResource(rs, pstmt, conn); - } - return true; - } - - - @Scheduled(fixedDelay = Long.MAX_VALUE) // 用一个非常大的延迟值,确保只执行一次 - public void subScribeSamle() { - System.out.println("subScribe执行一次 "+new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())); - MqttUtil.subScribeMQTT(); - } - - - - - - -} - diff --git a/src/main/java/com/xydl/service/impl/MqttServiceImpl.java b/src/main/java/com/xydl/service/impl/MqttServiceImpl.java new file mode 100644 index 0000000..eb3fb5a --- /dev/null +++ b/src/main/java/com/xydl/service/impl/MqttServiceImpl.java @@ -0,0 +1,349 @@ +package com.xydl.service.impl; + + +import com.xydl.mapper.OperationDB; +import com.xydl.util.DataSourceUtils; +import com.xydl.util.FormatUtil; +import com.xydl.util.MqttUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Service; +import java.sql.*; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.Date; + + +@Service +public class MqttServiceImpl { + private static final Logger logger = LoggerFactory.getLogger(MqttServiceImpl.class); + + @Autowired + OperationDB operationDBMapper; + + private static final String SYNC_TABLE = "sync_tables_info"; + + + public List getAllTableNameFromSyncTable() { + Connection conn = null; + PreparedStatement pstmt = null; + ResultSet rs = null; + List tableNames = new ArrayList<>(); + try { + conn = DataSourceUtils.getConnection(); + String sql = "select table_name from sync_tables_info"; + pstmt = conn.prepareStatement(sql); + rs = pstmt.executeQuery(); + + while(rs.next()){ + tableNames.add(rs.getString("table_name")); + } + } catch (SQLException e) { + logger.error("execute sql exception:", e); + } finally { + DataSourceUtils.closeResource(rs, pstmt, conn); + } + + return tableNames; + } + + public boolean tableNameIfExitsSyncRec(String tableName) { + Connection conn = null; + PreparedStatement pstmt = null; + ResultSet rs = null; + try { + conn = DataSourceUtils.getConnection(); + String sql = "select devid_val from sync_records where table_name = ?"; + pstmt = conn.prepareStatement(sql); + pstmt.setString(1, tableName); + rs = pstmt.executeQuery(); + + if(rs.next()){ + return true; + } + } catch (SQLException e) { + logger.error("execute sql exception:", e); + } finally { + DataSourceUtils.closeResource(rs, pstmt, conn); + } + + return false; + } + + public Map getFieldMap(String tableName) { + Connection conn = null; + PreparedStatement pstmt = null; + ResultSet rs = null; + Map fieldsMap = new HashMap<>(); + try { + conn = DataSourceUtils.getConnection(); + String sql = "select sync_fields_info.field_name, sync_fields_info.dest_field_name " + + "from sync_fields_info,sync_tables_info " + + "where sync_fields_info.client_id = 10 and sync_fields_info.table_name = sync_tables_info.table_name and 
sync_tables_info.table_name=?"; + pstmt = conn.prepareStatement(sql); + pstmt.setString(1, tableName); + rs = pstmt.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + + while(rs.next()){ + fieldsMap.put(rs.getString("field_name"),rs.getString("dest_field_name")); + } + } catch (SQLException e) { + logger.error("execute sql exception:", e); + } finally { + DataSourceUtils.closeResource(rs, pstmt, conn); + } + + return fieldsMap; + } + + + public Map getDeviceIDAndtime(String tableName) { + Connection conn = null; + PreparedStatement pstmt = null; + ResultSet rs = null; + String sqlExecuting = null ; + Map devIDTimeMap = new HashMap<>(); + try { + conn = DataSourceUtils.getConnection(); + String sql = "select devid_val,field_val2 from sync_records where table_name =?"; + pstmt = conn.prepareStatement(sql); + pstmt.setString(1, tableName); + rs = pstmt.executeQuery(); + while(rs.next()){ + devIDTimeMap.put(rs.getString("devid_val"),rs.getString("field_val2")); + } + } catch (SQLException e) { + logger.error("execute sql exception:", e); + } finally { + DataSourceUtils.closeResource(rs, pstmt, conn); + } + + return devIDTimeMap; + } + + public String getSQL(String tableName) { + Connection conn = null; + PreparedStatement pstmt = null; + ResultSet rs = null; + String sqlExecuting = null ; + try { + conn = DataSourceUtils.getConnection(); + String sql = "select * from sync_tables_info where table_name =?"; + pstmt = conn.prepareStatement(sql); + pstmt.setString(1, tableName); + rs = pstmt.executeQuery(); + if(rs.next()){ + sqlExecuting = rs.getString("sql"); + } + } catch (SQLException e) { + logger.error("execute sql exception:", e); + } finally { + DataSourceUtils.closeResource(rs, pstmt, conn); + } + + return sqlExecuting; + } + + + + public List> getData(String sqlExecuting, String deviceId, String timeStamp) { + Connection conn = null; + PreparedStatement pstmt = null; + ResultSet rs = null; + List> records = new ArrayList<>(); + try { + conn = DataSourceUtils.getConnection(); + String sql = sqlExecuting; + pstmt = conn.prepareStatement(sql); + pstmt.setString(1, deviceId); + pstmt.setString(2,timeStamp); + rs = pstmt.executeQuery(); + int columnCount = rs.getMetaData().getColumnCount(); //获取列的数量 + while(rs.next()){ + Map record = new HashMap<>(); + for (int col = 0; col < columnCount; col++) { + String columnName = rs.getMetaData().getColumnName(col + 1); + String columnValue = rs.getString(columnName); + record.put(columnName,columnValue); + } + records.add(record); + } + } catch (SQLException e) { + logger.error("execute sql exception:", e); + } finally { + DataSourceUtils.closeResource(rs, pstmt, conn); + } + + return records; + } + + public void addEarliestTime2SyncRecord(String tableName, String devId, String lastTime) { + Connection conn = null; + PreparedStatement pstmt = null; + ResultSet rs = null; + try { + conn = DataSourceUtils.getConnection(); + String sql = "insert into sync_records (client_id,table_name,devid_val,field_val2) values (?,?,?,?)"; + pstmt = conn.prepareStatement(sql); + pstmt.setInt(1, 10); + pstmt.setString(2, tableName); + pstmt.setString(3, devId); + pstmt.setString(4, lastTime); + pstmt.executeUpdate(); + } catch (SQLException e) { + logger.error("execute sql exception:", e); + } finally { + DataSourceUtils.closeResource(rs, pstmt, conn); + } + + } + + public List getAllDevId(String tableName) { + Connection conn = null; + PreparedStatement pstmt = null; + ResultSet rs = null; + String sqlExecuting = null ; + List devIDs = new ArrayList<>(); 
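+ // Note: a JDBC PreparedStatement "?" placeholder can bind values only, never identifiers,
+ // so the "select distinct eqmid from ?" and "select d_time from ? ..." statements in
+ // getAllDevId() and getLastTime() below fail at runtime with a SQL syntax error.
+ // The scheduled checkDevIdTimer() task therefore performs these lookups through the
+ // OperationDB MyBatis mapper instead, where the table name is presumably spliced into
+ // the statement as text (${...} substitution) rather than bound as a parameter.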
+ try { + conn = DataSourceUtils.getConnection(); + String sql = "select distinct eqmid from ?"; + pstmt = conn.prepareStatement(sql); + pstmt.setString(1, tableName); + rs = pstmt.executeQuery(); + while(rs.next()){ + devIDs.add(rs.getString("eqmid")); + } + } catch (SQLException e) { + logger.error("execute sql exception:", e); + } finally { + DataSourceUtils.closeResource(rs, pstmt, conn); + } + + return devIDs; + } + + public String getLastTime(String tableName, String devId) { + Connection conn = null; + PreparedStatement pstmt = null; + ResultSet rs = null; + try { + conn = DataSourceUtils.getConnection(); + String sql = "select d_time from ? where eqmid=? ORDER BY d_time asc limit 1"; + pstmt = conn.prepareStatement(sql); + pstmt.setString(1, tableName); + pstmt.setString(2, devId); + rs = pstmt.executeQuery(); + if(rs.next()){ + return rs.getString("d_time"); + } + } catch (SQLException e) { + logger.error("execute sql exception:", e); + } finally { + DataSourceUtils.closeResource(rs, pstmt, conn); + } + + return null; + } + + + + private List> transformList(Map fieldMap, List> deviceIDData) { + List> newDeviceIDData = new ArrayList<>(); + for(Map fieldValueMap : deviceIDData){ + newDeviceIDData.add(transformOneRecord(fieldMap,fieldValueMap)); + } + return newDeviceIDData; + } + + private Map transformOneRecord(Map fieldMap, Map fieldValueMap) { + Map newFieldValueMap = new HashMap<>(); + for(String field : fieldMap.keySet()){ + if(fieldValueMap.containsKey(field)){ + newFieldValueMap.put(fieldMap.get(field),fieldValueMap.get(field) ); + } + } + return newFieldValueMap; + } + + + + public boolean updateSyncRecordsTable(String tableName, String deviceID, String time) { + Connection conn = null; + PreparedStatement pstmt = null; + ResultSet rs = null; + try { + conn = DataSourceUtils.getConnection(); + String sql = "update sync_records set field_val2 = ? where table_name = ? 
and devid_val = ?"; + pstmt = conn.prepareStatement(sql); + pstmt.setString(1, time); + pstmt.setString(2, tableName); + pstmt.setString(3, deviceID); + pstmt.executeUpdate(); + + } catch (SQLException e) { + logger.error("execute sql exception:", e); + return false; + } finally { + DataSourceUtils.closeResource(rs, pstmt, conn); + } + return true; + } + + @Scheduled(initialDelay=1000, fixedRate = 1000 * 3600) //通过@Scheduled声明该方法是计划任务,使用fixedRate属性每隔固定时间执行 + public void reportRecord(){ + logger.info("开始执行"); + List allTableNames = getAllTableNameFromSyncTable(); + for(String tableName : allTableNames){ + Map fieldMap = getFieldMap(tableName); + String sqlExecuting = getSQL(tableName); + + Map devIDLastTimeMap = getDeviceIDAndtime(tableName); + for(String deviceID : devIDLastTimeMap.keySet()){ + List> dataOfoneDeviceID = getData(sqlExecuting,deviceID, (String) devIDLastTimeMap.get(deviceID)); + + List> newDataOfoneDeviceID = transformList(fieldMap,dataOfoneDeviceID); + + String jsonStringData = FormatUtil.list2Json(newDataOfoneDeviceID); + logger.info("表{}设备{}推送数据:{}",tableName,deviceID,jsonStringData); + if(MqttUtil.publish2MQTT(jsonStringData)){ + updateSyncRecordsTable(tableName,deviceID, (String) devIDLastTimeMap.get(deviceID)); + logger.info("推送成功"); + }else{ + logger.info("消息推送失败"); + } + } + } + } + + + @Scheduled(fixedDelay = Long.MAX_VALUE) // 用一个非常大的延迟值,确保只执行一次 + public void subScribeSamle() { + logger.info("开始订阅===subScribe执行一次==={}",new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())); + MqttUtil.subScribeMQTT(); + } + + @Scheduled(fixedRate = 1000 * 3600) //通过@Scheduled声明该方法是计划任务,使用fixedRate属性每隔固定时间执行 + public void checkDevIdTimer() { + logger.info("每小时检测一次同步的表是否在‘同步记录表’"); + List allTableNames = getAllTableNameFromSyncTable(); + for(String tableName : allTableNames){ + if(!tableNameIfExitsSyncRec(tableName)){ + logger.info("有不存在的表,把所有的devId及最早的时间更新到'同步记录表'"); + List devIds = operationDBMapper.getAllDevId(tableName); + for(String devId : devIds){ + String lastTime = operationDBMapper.getLastTime(tableName,devId); + addEarliestTime2SyncRecord(tableName,devId,lastTime); + } + } + } + } + + + + + +} + diff --git a/src/main/java/com/xydl/util/CommonUtils.java b/src/main/java/com/xydl/util/CommonUtils.java index 5c3a872..58fcaf5 100644 --- a/src/main/java/com/xydl/util/CommonUtils.java +++ b/src/main/java/com/xydl/util/CommonUtils.java @@ -41,135 +41,21 @@ public class CommonUtils { } - private static boolean updateConfig(String name, Map value) { - Connection conn = null; - PreparedStatement pstmt = null; - try { - conn = DataSourceUtils.getConnection(); - String sql = "update global_configuration set value=?::json where name=?"; - pstmt = conn.prepareStatement(sql); - pstmt.setString(1, CommonUtils.getJsonFromObject(value)); - pstmt.setString(2, name); - pstmt.executeUpdate(); - } catch (Exception e) { - logger.error("execute sql exception:", e); - return false; - } finally { - DataSourceUtils.closeResource(pstmt, conn); - } - return true; - } - private static boolean addConfig(String name, Map value) { - Connection conn = null; - PreparedStatement pstmt = null; - try { - conn = DataSourceUtils.getConnection(); - String sql = "INSERT INTO global_configuration(name, value) VALUES (?, ?::json)"; - pstmt = conn.prepareStatement(sql); - pstmt.setString(1, name); - pstmt.setString(2, CommonUtils.getJsonFromObject(value)); - pstmt.executeUpdate(); - } catch (Exception e) { - logger.error("execute sql exception:", e); - return false; - } finally { - 
DataSourceUtils.closeResource(pstmt, conn); - } - return true; - } + public static ObjectMapper getObjectMapper() { return objectMapper; } - public static T convertJsonToObject(String content, Class valueType) { - if (content == null) { - return null; - } - try { - return objectMapper.readValue(content.getBytes(StandardCharsets.UTF_8), valueType); - } catch (IOException e) { - logger.error("convert json to Object exception:", e); - } - return null; - } - public static List convertJsonToList(String content, Class valueType) { - if (content == null || content.length() == 0) { - return new ArrayList<>(); - } - try { - List list = objectMapper.readValue(content.getBytes(StandardCharsets.UTF_8), new TypeReference>() { - }); - List ret = new ArrayList<>(); - for (T t : list) { - ret.add(objectMapper.convertValue(t, valueType)); - } - return ret; - } catch (IOException e) { - logger.error("convert json to Object exception:", e); - } - return null; - } - public static String getJsonFromObject(Object object) { - String json = null; - try { - json = CommonUtils.getObjectMapper().writeValueAsString(object); - } catch (JsonProcessingException ignored) { - } - return json; - } - public static String getJsonValue(String jsonString, String key) { - Map jsonMap; - try { - jsonMap = objectMapper.readValue(jsonString, new TypeReference>() { - }); - } catch (IOException ignored) { - return null; - } - return jsonMap.get(key); - } - public static String getJsonString(Map map) { - String result = ""; - try { - result = objectMapper.writeValueAsString(map); - } catch (JsonProcessingException ignored) { - } - return result; - } - public static Map getJsonMap(String jsonString) { - Map jsonMap; - try { - jsonMap = objectMapper.readValue(jsonString, new TypeReference>() { - }); - } catch (IOException ignored) { - jsonMap = new HashMap<>(); - } - return jsonMap; - } - public static Map getMapFromJson(String jsonString) { - try { - return objectMapper.readValue(jsonString, new TypeReference>() { - }); - } catch (IOException ignored) { - } - return new HashMap<>(); - } - public static String getPropertyValue(String propertyName) { - Properties properties = loadPropertyFile("sdwan.common.cfg"); - if (properties != null) { - return properties.getProperty(propertyName); - } - return null; - } public static String getPropertyValue(String propertyName, String defaultValue) { Properties properties = loadPropertyFile("sdwan.common.cfg"); diff --git a/src/main/java/com/xydl/util/DataSourceUtils.java b/src/main/java/com/xydl/util/DataSourceUtils.java index 0649ec1..5a1b3f6 100644 --- a/src/main/java/com/xydl/util/DataSourceUtils.java +++ b/src/main/java/com/xydl/util/DataSourceUtils.java @@ -19,15 +19,12 @@ public class DataSourceUtils { } private static void initDataSource() { - Properties properties = CommonUtils.loadPropertyFile("sdwan.datasource.druid.cfg"); - if (properties == null) { - logger.error("init dataSource failed, no config found."); - properties = new Properties(); - properties.setProperty("driverClassName", "com.mysql.cj.jdbc.Driver"); - properties.setProperty("url", "jdbc:mysql://localhost:3306/cac"); - properties.setProperty("username", "root"); - properties.setProperty("password", "root"); - } + Properties properties = new Properties(); + properties.setProperty("driverClassName", "com.mysql.cj.jdbc.Driver"); + properties.setProperty("url", "jdbc:mysql://localhost:3306/cac"); + properties.setProperty("username", "root"); + properties.setProperty("password", "root"); + try { dataSource = 
DruidDataSourceFactory.createDataSource(properties); url = ((DruidDataSource) dataSource).getUrl(); @@ -42,13 +39,6 @@ public class DataSourceUtils { return dataSource.getConnection(); } - public static String getDatabaseIp() { - try { - return url.substring(url.indexOf("//") + 2, url.lastIndexOf(":")); - } catch (IndexOutOfBoundsException ignored) { - } - return ""; - } // 提供关闭资源的方法【connection是归还到连接池】 // 提供关闭资源的方法 【方法重载】3 dql diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml index 1659155..365acd9 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -17,6 +17,12 @@ spring: #mode: always #设置数据源类型C type: com.alibaba.druid.pool.DruidDataSource + #初始化执行sql + initialization-mode: always + schema: + - classpath:/sql/mqtt-schema.sql + continue-on-error: true + mybatis: configuration: map-underscore-to-camel-case: true diff --git a/src/main/resources/com/xydl/mapper/EaifMapper.xml b/src/main/resources/com/xydl/mapper/EaifMapper.xml deleted file mode 100644 index 6d2fe78..0000000 --- a/src/main/resources/com/xydl/mapper/EaifMapper.xml +++ /dev/null @@ -1,62 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - insert into employee values (#{id},#{lastName},#{email},#{gender},#{department.id}) - - - - update employee - - - last_name=#{employee.lastName}, - - - email=#{employee.email}, - - - gender=#{employee.gender}, - - - d_id=#{employee.department.id} - - - where id = #{id} - - - - delete from employee where id=#{id} - - \ No newline at end of file diff --git a/src/main/resources/com/xydl/mapper/EiaMapper.xml b/src/main/resources/com/xydl/mapper/EiaMapper.xml deleted file mode 100644 index 5372a41..0000000 --- a/src/main/resources/com/xydl/mapper/EiaMapper.xml +++ /dev/null @@ -1,69 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - insert into employee values (#{id},#{lastName},#{email},#{gender},#{department.id}) - - - - update employee - - - last_name=#{employee.lastName}, - - - email=#{employee.email}, - - - gender=#{employee.gender}, - - - d_id=#{employee.department.id} - - - where id = #{id} - - - - delete from employee where id=#{id} - - \ No newline at end of file diff --git a/src/main/resources/com/xydl/mapper/EpaMapper.xml b/src/main/resources/com/xydl/mapper/EpaMapper.xml deleted file mode 100644 index c5d0e4f..0000000 --- a/src/main/resources/com/xydl/mapper/EpaMapper.xml +++ /dev/null @@ -1,86 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - insert into employee values (#{id},#{lastName},#{email},#{gender},#{department.id}) - - - - update employee - - - last_name=#{employee.lastName}, - - - email=#{employee.email}, - - - gender=#{employee.gender}, - - - d_id=#{employee.department.id} - - - where id = #{id} - - - - delete from employee where id=#{id} - - \ No newline at end of file diff --git a/src/main/resources/com/xydl/mapper/EtpMapper.xml b/src/main/resources/com/xydl/mapper/EtpMapper.xml deleted file mode 100644 index c80f768..0000000 --- a/src/main/resources/com/xydl/mapper/EtpMapper.xml +++ /dev/null @@ -1,69 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - insert into employee values (#{id},#{lastName},#{email},#{gender},#{department.id}) - - - - update employee - - - last_name=#{employee.lastName}, - - - email=#{employee.email}, - - - gender=#{employee.gender}, - - - d_id=#{employee.department.id} - - - where id = #{id} - - - - delete from employee where id=#{id} - - \ No 
diff --git a/src/main/resources/com/xydl/mapper/MicMapper.xml b/src/main/resources/com/xydl/mapper/MicMapper.xml
deleted file mode 100644
index 6ad6a16..0000000
--- a/src/main/resources/com/xydl/mapper/MicMapper.xml
+++ /dev/null
@@ -1,70 +0,0 @@
-        insert into employee values (#{id},#{lastName},#{email},#{gender},#{department.id})
-        update employee
-            last_name=#{employee.lastName},
-            email=#{employee.email},
-            gender=#{employee.gender},
-            d_id=#{employee.department.id}
-        where id = #{id}
-        delete from employee where id=#{id}
\ No newline at end of file
diff --git a/src/main/resources/com/xydl/mapper/MoaMapper.xml b/src/main/resources/com/xydl/mapper/MoaMapper.xml
deleted file mode 100644
index e7d9436..0000000
--- a/src/main/resources/com/xydl/mapper/MoaMapper.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-        insert into employee values (#{id},#{lastName},#{email},#{gender},#{department.id})
-        update employee
-            last_name=#{employee.lastName},
-            email=#{employee.email},
-            gender=#{employee.gender},
-            d_id=#{employee.department.id}
-        where id = #{id}
-        delete from employee where id=#{id}
\ No newline at end of file
diff --git a/src/main/resources/com/xydl/mapper/OperationDb.xml b/src/main/resources/com/xydl/mapper/OperationDb.xml
new file mode 100644
index 0000000..35e7cfd
--- /dev/null
+++ b/src/main/resources/com/xydl/mapper/OperationDb.xml
@@ -0,0 +1,14 @@
diff --git a/src/main/resources/com/xydl/mapper/PdMapper.xml b/src/main/resources/com/xydl/mapper/PdMapper.xml
deleted file mode 100644
index e20de19..0000000
--- a/src/main/resources/com/xydl/mapper/PdMapper.xml
+++ /dev/null
@@ -1,69 +0,0 @@
-        insert into employee values (#{id},#{lastName},#{email},#{gender},#{department.id})
-        update employee
-            last_name=#{employee.lastName},
-            email=#{employee.email},
-            gender=#{employee.gender},
-            d_id=#{employee.department.id}
-        where id = #{id}
-        delete from employee where id=#{id}
\ No newline at end of file
diff --git a/src/main/resources/com/xydl/mapper/RptTemperMapper.xml b/src/main/resources/com/xydl/mapper/RptTemperMapper.xml
deleted file mode 100644
index 35b33cb..0000000
--- a/src/main/resources/com/xydl/mapper/RptTemperMapper.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-        insert into employee values (#{id},#{lastName},#{email},#{gender},#{department.id})
-        update employee
-            last_name=#{employee.lastName},
-            email=#{employee.email},
-            gender=#{employee.gender},
-            d_id=#{employee.department.id}
-        where id = #{id}
-        delete from employee where id=#{id}
\ No newline at end of file
diff --git a/src/main/resources/com/xydl/mapper/ScurMapper.xml b/src/main/resources/com/xydl/mapper/ScurMapper.xml
deleted file mode 100644
index 4ec60a1..0000000
--- a/src/main/resources/com/xydl/mapper/ScurMapper.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-        insert into employee values (#{id},#{lastName},#{email},#{gender},#{department.id})
-        update employee
-            last_name=#{employee.lastName},
-            email=#{employee.email},
-            gender=#{employee.gender},
-            d_id=#{employee.department.id}
-        where id = #{id}
-        delete from employee where id=#{id}
\ No newline at end of file
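The statements inside the new OperationDb.xml are not visible in this rendering of the diff. Purely as a hypothetical illustration of the kind of mapper interface such a file is usually paired with (the interface name, method names, and statement ids below are invented for the example, not taken from the project), a MyBatis mapper that runs externally supplied SQL could look like this:

    import java.util.List;
    import java.util.Map;

    import org.apache.ibatis.annotations.Mapper;
    import org.apache.ibatis.annotations.Param;

    // Hypothetical companion interface; the real statement ids live in OperationDb.xml.
    @Mapper
    public interface OperationDbMapper {
        // e.g. <select id="selectBySql" resultType="map">${value}</select> in the XML
        List<Map<String, Object>> selectBySql(@Param("value") String sql);

        // e.g. <update id="executeSql">${value}</update> in the XML
        int executeSql(@Param("value") String sql);
    }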
diff --git a/src/main/resources/com/xydl/mapper/Sf6Mapper.xml b/src/main/resources/com/xydl/mapper/Sf6Mapper.xml
deleted file mode 100644
index c334cd5..0000000
--- a/src/main/resources/com/xydl/mapper/Sf6Mapper.xml
+++ /dev/null
@@ -1,67 +0,0 @@
-        insert into employee values (#{id},#{lastName},#{email},#{gender},#{department.id})
-        update employee
-            last_name=#{employee.lastName},
-            email=#{employee.email},
-            gender=#{employee.gender},
-            d_id=#{employee.department.id}
-        where id = #{id}
-        delete from employee where id=#{id}
\ No newline at end of file
diff --git a/src/main/resources/com/xydl/mapper/Sf6envMapper.xml b/src/main/resources/com/xydl/mapper/Sf6envMapper.xml
deleted file mode 100644
index 35d414f..0000000
--- a/src/main/resources/com/xydl/mapper/Sf6envMapper.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-        insert into employee values (#{id},#{lastName},#{email},#{gender},#{department.id})
-        update employee
-            last_name=#{employee.lastName},
-            email=#{employee.email},
-            gender=#{employee.gender},
-            d_id=#{employee.department.id}
-        where id = #{id}
-        delete from employee where id=#{id}
\ No newline at end of file
diff --git a/src/main/resources/sql/mqtt-schema.sql b/src/main/resources/sql/mqtt-schema.sql
new file mode 100644
index 0000000..34286b0
--- /dev/null
+++ b/src/main/resources/sql/mqtt-schema.sql
@@ -0,0 +1,59 @@
+/*
+ Navicat Premium Data Transfer
+
+ Source Server         : local machine
+ Source Server Type    : MySQL
+ Source Server Version : 80100
+ Source Host           : localhost:3306
+ Source Schema         : cac
+
+ Target Server Type    : MySQL
+ Target Server Version : 80100
+ File Encoding         : 65001
+
+ Date: 05/12/2023 10:26:24
+*/
+
+SET NAMES utf8mb4;
+SET FOREIGN_KEY_CHECKS = 0;
+
+-- ----------------------------
+-- Table structure for sync_tables_info
+-- ----------------------------
+DROP TABLE IF EXISTS `sync_tables_info`;
+CREATE TABLE `sync_tables_info` (
+  `client_id` int UNSIGNED NOT NULL,
+  `table_name` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '',
+  `sql` varchar(2048) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '',
+  `devid_field_name` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '',
+  `outer_devid_fname` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '',
+  `field_name` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '',
+  `field_type` tinyint NOT NULL DEFAULT 1 COMMENT '1: integer  2: string  3: date  4: float',
+  `update_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0),
+  PRIMARY KEY (`client_id`, `table_name`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+DROP TABLE IF EXISTS `sync_records`;
+CREATE TABLE `sync_records` (
+  `client_id` int UNSIGNED NOT NULL,
+  `table_name` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '',
+  `devid_val` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '',
+  `field_val1` bigint UNSIGNED NULL DEFAULT 0,
+  `field_val2` datetime(0) NULL DEFAULT '1999-01-01 01:00:00',
+  `update_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0),
+  PRIMARY KEY (`client_id`, `table_name`, `devid_val`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+DROP TABLE IF EXISTS `sync_fields_info`;
+CREATE TABLE `sync_fields_info` (
+  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
+  `client_id` int UNSIGNED NOT NULL,
+  `table_name` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '',
+  `field_name` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '',
+  `field_type` tinyint UNSIGNED NOT NULL DEFAULT 0,
+  `dest_field_name` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '',
+  PRIMARY KEY (`id`) USING BTREE,
+  INDEX `fields`(`client_id`, `table_name`, `field_name`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 41 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+SET FOREIGN_KEY_CHECKS = 1;
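Judging by the column names, the three tables appear to form a small synchronization catalogue: sync_tables_info describes, per client, which table to sync (including a ready-made SQL string and the device-id column names), sync_fields_info maps source fields to destination fields, and sync_records keeps the last synced value and timestamp per device. A minimal sketch, assuming the existing DataSourceUtils helper, of reading that catalogue for one client; the class and method names are illustrative only.

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.util.ArrayList;
    import java.util.List;

    import com.xydl.util.DataSourceUtils;

    // Illustrative only: loads the per-client sync configuration created by mqtt-schema.sql.
    public class SyncCatalogReader {
        public static List<String> loadSyncSql(int clientId) throws Exception {
            List<String> statements = new ArrayList<>();
            String query = "select table_name, `sql`, devid_field_name, field_type "
                    + "from sync_tables_info where client_id = ?";
            try (Connection conn = DataSourceUtils.getConnection();
                 PreparedStatement pstmt = conn.prepareStatement(query)) {
                pstmt.setInt(1, clientId);
                try (ResultSet rs = pstmt.executeQuery()) {
                    while (rs.next()) {
                        // the `sql` column holds the statement to run against the source table
                        statements.add(rs.getString("sql"));
                    }
                }
            }
            return statements;
        }
    }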