Commit eaf2b138 authored by junjie

feat(backend): tidy up comments and imports

Parent daf784f2
@@ -2,8 +2,6 @@ package io.dataease.config;
 import com.fit2cloud.autoconfigure.QuartzAutoConfiguration;
 import io.dataease.commons.utils.CommonThreadPool;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.sql.SQLContext;
 import org.apache.spark.sql.SparkSession;
 import org.pentaho.di.core.KettleEnvironment;
 import org.pentaho.di.repository.filerep.KettleFileRepository;
...
 package io.dataease.listener;
 import io.dataease.base.domain.DatasetTableTask;
-import io.dataease.job.sechedule.ScheduleManager;
 import io.dataease.service.ScheduleService;
 import io.dataease.service.dataset.DataSetTableTaskService;
 import org.springframework.boot.context.event.ApplicationReadyEvent;
...
@@ -3,16 +3,10 @@ package io.dataease.listener;
 import io.dataease.base.domain.DatasetTable;
 import io.dataease.base.domain.DatasetTableExample;
 import io.dataease.base.domain.DatasetTableField;
-import io.dataease.base.domain.DatasetTableFieldExample;
-import io.dataease.base.mapper.DatasetTableFieldMapper;
 import io.dataease.base.mapper.DatasetTableMapper;
-import io.dataease.commons.utils.CommonBeanFactory;
 import io.dataease.commons.utils.CommonThreadPool;
 import io.dataease.service.dataset.DataSetTableFieldsService;
 import io.dataease.service.spark.SparkCalc;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.sql.SQLContext;
-import org.apache.spark.sql.SparkSession;
 import org.springframework.boot.context.event.ApplicationReadyEvent;
 import org.springframework.context.ApplicationListener;
 import org.springframework.core.annotation.Order;
...
@@ -4,7 +4,6 @@ import com.google.gson.Gson;
 import com.google.gson.reflect.TypeToken;
 import io.dataease.base.domain.*;
 import io.dataease.base.mapper.ChartViewMapper;
-import io.dataease.base.mapper.DatasetTableFieldMapper;
 import io.dataease.commons.utils.AuthUtils;
 import io.dataease.commons.utils.BeanUtils;
 import io.dataease.controller.request.chart.ChartViewRequest;
...
@@ -6,7 +6,6 @@ import io.dataease.dto.chart.ChartViewFieldDTO;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.lang3.ObjectUtils;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -28,7 +27,6 @@ import org.springframework.stereotype.Service;
 import scala.Tuple2;
 import javax.annotation.Resource;
-import java.math.BigDecimal;
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Base64;
@@ -51,7 +49,6 @@ public class SparkCalc {
 JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());
 // Spark SQL Context
-// SQLContext sqlContext = CommonBeanFactory.getBean(SQLContext.class);
 SQLContext sqlContext = new SQLContext(sparkContext);
 sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
 sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));
@@ -82,7 +79,6 @@ public class SparkCalc {
 JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());
 // Spark SQL Context
-// SQLContext sqlContext = CommonBeanFactory.getBean(SQLContext.class);
 SQLContext sqlContext = new SQLContext(sparkContext);
 sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
 sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));
@@ -99,7 +95,6 @@ public class SparkCalc {
 String scanToString = new String(Base64.getEncoder().encode(proto.toByteArray()));
 // HBase config
-// Configuration conf = CommonBeanFactory.getBean(Configuration.class);
 org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
 conf.set("hbase.zookeeper.quorum", env.getProperty("hbase.zookeeper.quorum"));
 conf.set("hbase.zookeeper.property.clientPort", env.getProperty("hbase.zookeeper.property.clientPort"));
...
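
The final hunk makes the same move on the HBase side: the `Configuration` is constructed directly instead of being fetched as a bean, and the `Scan` is serialized into the Base64 protobuf string that `TableInputFormat` expects. A hedged sketch of that round trip, assuming HBase 1.x client APIs (`ProtobufUtil` lives under a shaded package in 2.x); the quorum, port, and table parameters are illustrative:

```java
import java.io.IOException;
import java.util.Base64;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

public class HBaseScanConfigSketch {
    // Serialize a Scan into the Base64 protobuf string that TableInputFormat
    // later reads back out of the job configuration.
    public static Configuration build(String quorum, String clientPort, String table)
            throws IOException {
        ClientProtos.Scan proto = ProtobufUtil.toScan(new Scan());
        String scanToString = new String(Base64.getEncoder().encode(proto.toByteArray()));

        Configuration conf = new Configuration();
        conf.set("hbase.zookeeper.quorum", quorum);
        conf.set("hbase.zookeeper.property.clientPort", clientPort);
        conf.set(TableInputFormat.INPUT_TABLE, table);  // table to scan
        conf.set(TableInputFormat.SCAN, scanToString);  // the serialized Scan
        return conf;
    }
}
```

With such a configuration, `sparkContext.newAPIHadoopRDD(conf, TableInputFormat.class, ImmutableBytesWritable.class, Result.class)` yields the `Tuple2<ImmutableBytesWritable, Result>` pairs that the imports kept in this file suggest the surrounding class consumes.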