Commit 070f9a9f5d4e47d0e0f96d333d2f6c2de34ce529

Authored by sh
1 parent ed7d30d0

Change the id column to int when Spark writes to the database

juvenile-prosecution-boot/jeecg-boot-module-system/src/main/java/org/jeecg/modules/handle/service/SparkEtlService.java
@@ -61,9 +61,9 @@ public class SparkEtlService implements Serializable {
     // SaveMode.Ignore: if the table does not exist, create it and store the data; if the table already exists, skip the write entirely without raising an error
     private SaveMode SPARK_MODE = SaveMode.Append;
 
-    private String TABLE_MINOR = "mr_minor_copy1";
-    private String TABLE_FAMILY = "mr_family_copy1";
-    private String TABLE_SCHOOL = "mr_school_copy1";
+    private String TABLE_MINOR = "mr_minor";
+    private String TABLE_FAMILY = "mr_family";
+    private String TABLE_SCHOOL = "mr_school";
 
     @PostConstruct
     private void init() {
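For context on how these table constants and SPARK_MODE come together, here is a minimal sketch of a JDBC write with SaveMode.Append. The Dataset variable name, JDBC URL, and connection properties are assumptions for illustration only; they are not part of this diff.

import java.util.Properties;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;

public class JdbcWriteSketch {
    public static void writeMinors(Dataset<Row> minorDf) {
        // Hypothetical connection settings; the real ones live outside this diff.
        String jdbcUrl = "jdbc:mysql://localhost:3306/juvenile";
        Properties props = new Properties();
        props.setProperty("user", "root");
        props.setProperty("password", "secret");

        // SaveMode.Append creates the table when it is missing and appends rows when it exists;
        // SaveMode.Ignore would instead skip the write entirely if the table already exists.
        minorDf.write()
               .mode(SaveMode.Append)
               .jdbc(jdbcUrl, "mr_minor", props);
    }
}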
@@ -147,7 +147,7 @@ public class SparkEtlService implements Serializable {
         JavaRDD<Row> rowRDD = minorsJavaRDD.zipWithUniqueId().map(new Function<Tuple2<Minor, Long>, Row>() {
             @Override
             public Row call(Tuple2<Minor, Long> v1) throws Exception {
-                String id = v1._2.toString();
+                long id = v1._2;
                 Minor minor = v1._1;
                 String name = minor.getName();
                 String sys_org_code = minor.getSysOrgCode();
@@ -197,7 +197,7 @@ public class SparkEtlService implements Serializable {
 //        schemaFields.add(DataTypes.createStructField("updateTime", DataTypes.StringType, true));
 
         List<StructField> schemaFields = new ArrayList<StructField>();
-        schemaFields.add(DataTypes.createStructField("id", DataTypes.StringType, false));
+        schemaFields.add(DataTypes.createStructField("id", DataTypes.IntegerType, false));
         schemaFields.add(DataTypes.createStructField("name", DataTypes.StringType, true));
         schemaFields.add(DataTypes.createStructField("sys_org_code", DataTypes.StringType, true));
         schemaFields.add(DataTypes.createStructField("household_num", DataTypes.StringType, true));
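One thing to watch in this hunk: zipWithUniqueId() pairs each record with a Long, while the schema now declares IntegerType. Spark's createDataFrame path requires the Java value placed in each Row to match the declared SQL type, so if the Row is built with RowFactory.create(id, ...) elsewhere in this method (not shown in the diff), a boxed Long against IntegerType would typically fail at runtime. Below is a minimal sketch of one consistent pairing, assuming a simplified two-column schema; the class, method, and variable names are illustrative only.

import java.util.ArrayList;
import java.util.List;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import scala.Tuple2;

public class IdTypeSketch {
    public static Dataset<Row> toDataFrame(SparkSession spark, JavaRDD<String> names) {
        // zipWithUniqueId() yields (value, Long id) pairs.
        JavaRDD<Row> rowRDD = names.zipWithUniqueId()
                .map((Tuple2<String, Long> v1) -> RowFactory.create(v1._2, v1._1));

        // The external Java type must line up with the declared Spark SQL type:
        // a boxed Long pairs with LongType; using IntegerType instead would require
        // casting the id down to int when the Row is created.
        List<StructField> schemaFields = new ArrayList<StructField>();
        schemaFields.add(DataTypes.createStructField("id", DataTypes.LongType, false));
        schemaFields.add(DataTypes.createStructField("name", DataTypes.StringType, true));
        StructType schema = DataTypes.createStructType(schemaFields);

        return spark.createDataFrame(rowRDD, schema);
    }
}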
@@ -243,7 +243,7 @@ public class SparkEtlService implements Serializable {
         JavaRDD<Row> rowRDD = familiesJavaRDD.zipWithUniqueId().map(new Function<Tuple2<Family, Long>, Row>() {
             @Override
             public Row call(Tuple2<Family, Long> v1) throws Exception {
-                String id = v1._2.toString();
+                long id = v1._2;
                 Family family = v1._1;
                 String name = family.getName();
                 String sys_org_code = family.getSysOrgCode();
@@ -281,7 +281,7 @@ public class SparkEtlService implements Serializable {
 //        schemaFields.add(DataTypes.createStructField("reason", DataTypes.StringType, true));
 //        schemaFields.add(DataTypes.createStructField("other", DataTypes.StringType, true));
 //        schemaFields.add(DataTypes.createStructField("createTime", DataTypes.StringType, true));
-        schemaFields.add(DataTypes.createStructField("id", DataTypes.StringType, false));
+        schemaFields.add(DataTypes.createStructField("id", DataTypes.IntegerType, false));
         schemaFields.add(DataTypes.createStructField("name", DataTypes.StringType, true));
         schemaFields.add(DataTypes.createStructField("sys_org_code", DataTypes.StringType, true));
         schemaFields.add(DataTypes.createStructField("household_num", DataTypes.StringType, true));
@@ -327,7 +327,7 @@ public class SparkEtlService implements Serializable {
         JavaRDD<Row> rowRDD = mrSchoolsJavaRDD.zipWithUniqueId().map(new Function<Tuple2<MrSchool, Long>, Row>() {
             @Override
             public Row call(Tuple2<MrSchool, Long> v1) throws Exception {
-                String id = v1._2.toString();
+                long id = v1._2;
                 MrSchool mrSchool = v1._1;
                 String name = mrSchool.getName();
                 String sys_org_code = mrSchool.getSysOrgCode();
@@ -370,7 +370,7 @@ public class SparkEtlService implements Serializable {
 //        schemaFields.add(DataTypes.createStructField("phoneTwo", DataTypes.StringType, true));
 //        schemaFields.add(DataTypes.createStructField("phone", DataTypes.StringType, true));
 //        schemaFields.add(DataTypes.createStructField("createTime", DataTypes.StringType, true));
-        schemaFields.add(DataTypes.createStructField("id", DataTypes.StringType, false));
+        schemaFields.add(DataTypes.createStructField("id", DataTypes.IntegerType, false));
         schemaFields.add(DataTypes.createStructField("name", DataTypes.StringType, true));
         schemaFields.add(DataTypes.createStructField("sys_org_code", DataTypes.StringType, true));
         schemaFields.add(DataTypes.createStructField("identity", DataTypes.StringType, true));
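Side note on the ids themselves: zipWithUniqueId() hands back Longs that are unique but not consecutive (with n partitions, partition k gets ids k, n+k, 2n+k, ...). The following small standalone demo is purely illustrative and not part of this commit.

import java.util.Arrays;

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;

public class ZipWithUniqueIdDemo {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .master("local[2]")
                .appName("zipWithUniqueId-demo")
                .getOrCreate();
        JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());

        // With 2 partitions: partition 0 gets ids 0, 2, 4, ... and partition 1 gets 1, 3, 5, ...
        JavaPairRDD<String, Long> withIds = jsc
                .parallelize(Arrays.asList("a", "b", "c", "d", "e"), 2)
                .zipWithUniqueId();

        withIds.collect().forEach(t -> System.out.println(t._1 + " -> " + t._2));

        spark.stop();
    }
}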