Commit f3fad450 authored by taojinlong

feat: performance testing

Parent 6b285d73
@@ -22,15 +22,15 @@ public class CommonConfig {
     private Environment env; // holds the values loaded from the configuration file
     private static String root_path = "/opt/dataease/data/kettle/";
-    @Bean
-    @ConditionalOnMissingBean
-    public org.apache.hadoop.conf.Configuration configuration() {
-        org.apache.hadoop.conf.Configuration configuration = new org.apache.hadoop.conf.Configuration();
-        configuration.set("hbase.zookeeper.quorum", env.getProperty("hbase.zookeeper.quorum"));
-        configuration.set("hbase.zookeeper.property.clientPort", env.getProperty("hbase.zookeeper.property.clientPort"));
-        configuration.set("hbase.client.retries.number", env.getProperty("hbase.client.retries.number", "1"));
-        return configuration;
-    }
+//    @Bean
+//    @ConditionalOnMissingBean
+//    public org.apache.hadoop.conf.Configuration configuration() {
+//        org.apache.hadoop.conf.Configuration configuration = new org.apache.hadoop.conf.Configuration();
+//        configuration.set("hbase.zookeeper.quorum", env.getProperty("hbase.zookeeper.quorum"));
+//        configuration.set("hbase.zookeeper.property.clientPort", env.getProperty("hbase.zookeeper.property.clientPort"));
+//        configuration.set("hbase.client.retries.number", env.getProperty("hbase.client.retries.number", "1"));
+//        return configuration;
+//    }
     @Bean
     @ConditionalOnMissingBean
...
@@ -7,6 +7,7 @@ import io.dataease.datasource.dto.MysqlConfigrationDTO;
 import io.dataease.datasource.dto.SqlServerConfigration;
 import io.dataease.datasource.dto.TableFiled;
 import io.dataease.datasource.request.DatasourceRequest;
+import org.apache.arrow.util.VisibleForTesting;
 import org.apache.commons.lang3.StringUtils;
 import org.springframework.stereotype.Service;
@@ -39,6 +40,23 @@ public class JdbcProvider extends DatasourceProvider {
         return list;
     }
+    @VisibleForTesting
+    public void exec(DatasourceRequest datasourceRequest) throws Exception {
+        Connection connection = null;
+        try {
+            connection = getConnectionFromPool(datasourceRequest);
+            Statement stat = connection.createStatement();
+            stat.execute(datasourceRequest.getQuery());
+        } catch (SQLException e) {
+            throw new Exception("ERROR:" + e.getMessage(), e);
+        } catch (Exception e) {
+            throw new Exception("ERROR:" + e.getMessage(), e);
+        } finally {
+            returnSource(connection, datasourceRequest.getDatasource().getId());
+        }
+    }
     @Override
     public ResultSet getDataResultSet(DatasourceRequest datasourceRequest) throws Exception {
         ResultSet rs;
@@ -47,7 +65,6 @@ public class JdbcProvider extends DatasourceProvider {
             connection = getConnectionFromPool(datasourceRequest);
             Statement stat = connection.createStatement();
             rs = stat.executeQuery(datasourceRequest.getQuery());
-            returnSource(connection, datasourceRequest.getDatasource().getId());
         } catch (SQLException e) {
             throw new Exception("ERROR:" + e.getMessage(), e);
         } catch (Exception e) {
@@ -66,7 +83,6 @@ public class JdbcProvider extends DatasourceProvider {
             connection = getConnectionFromPool(datasourceRequest);
             Statement stat = connection.createStatement();
             ResultSet rs = stat.executeQuery(datasourceRequest.getQuery() + MessageFormat.format(" LIMIT {0}, {1}", (datasourceRequest.getStartPage() - 1) * datasourceRequest.getPageSize(), datasourceRequest.getPageSize()));
-            returnSource(connection, datasourceRequest.getDatasource().getId());
             list = fetchResult(rs);
         } catch (SQLException e) {
             throw new Exception("ERROR:" + e.getMessage(), e);
@@ -174,8 +190,6 @@ public class JdbcProvider extends DatasourceProvider {
         return list;
     }
-    ;
     @Override
     public void test(DatasourceRequest datasourceRequest) throws Exception {
         String queryStr = getTablesSql(datasourceRequest);
...
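The new exec() above borrows a connection from the pool, runs a non-query statement, and always hands the connection back in the finally block, while the explicit returnSource calls were dropped from getDataResultSet and the paged query. A minimal usage sketch follows; the setDatasource/setQuery setters on DatasourceRequest and the injected bean are illustrative assumptions and may not match the actual classes:

// Hypothetical caller of the new exec(); setDatasource/setQuery are assumed setters.
@Resource
private JdbcProvider jdbcProvider;

public void runStatement(Datasource datasource) throws Exception {
    DatasourceRequest request = new DatasourceRequest();
    request.setDatasource(datasource);                                     // assumed setter
    request.setQuery("CREATE TABLE IF NOT EXISTS perf_test (id BIGINT)");  // assumed test DDL
    jdbcProvider.exec(request);  // connection is returned to the pool in the finally block
}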
@@ -33,20 +33,20 @@ public class AppStartReadHBaseListener implements ApplicationListener<ApplicationReadyEvent> {
     @Override
     public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) {
-        System.out.println("================= Read HBase start =================");
-        // On startup, find the tables scheduled for extraction in the datasets, read them from HBase and put them into the cache
-        DatasetTableExample datasetTableExample = new DatasetTableExample();
-        datasetTableExample.createCriteria().andModeEqualTo(1);
-        List<DatasetTable> datasetTables = datasetTableMapper.selectByExampleWithBLOBs(datasetTableExample);
-        for (DatasetTable table : datasetTables) {
-//            commonThreadPool.addTask(() -> {
-            try {
-                List<DatasetTableField> fields = dataSetTableFieldsService.getFieldsByTableId(table.getId());
-                sparkCalc.getHBaseDataAndCache(table.getId(), fields);
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-//            });
-        }
+        // System.out.println("================= Read HBase start =================");
+        // // On startup, find the tables scheduled for extraction in the datasets, read them from HBase and put them into the cache
+        // DatasetTableExample datasetTableExample = new DatasetTableExample();
+        // datasetTableExample.createCriteria().andModeEqualTo(1);
+        // List<DatasetTable> datasetTables = datasetTableMapper.selectByExampleWithBLOBs(datasetTableExample);
+        // for (DatasetTable table : datasetTables) {
+        //// commonThreadPool.addTask(() -> {
+        // try {
+        // List<DatasetTableField> fields = dataSetTableFieldsService.getFieldsByTableId(table.getId());
+        // sparkCalc.getHBaseDataAndCache(table.getId(), fields);
+        // } catch (Exception e) {
+        // e.printStackTrace();
+        // }
+        //// });
+        // }
     }
 }
@@ -41,6 +41,7 @@ import java.util.List;
 @Service
 public class SparkCalc {
     private static String column_family = "dataease";
+    private static String data_path = "/opt/dataease/data/db/";
     @Resource
     private Environment env; // holds the values loaded from the configuration file
@@ -54,12 +55,13 @@ public class SparkCalc {
         sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
         sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));
-        Dataset<Row> dataFrame = CacheUtil.getInstance().getCacheData(hTable);
-        if (ObjectUtils.isEmpty(dataFrame)) {
-            dataFrame = getHBaseDataAndCache(sparkContext, sqlContext, hTable, fields);
-        }
+        Dataset<Row> dataFrame = getData(sparkContext, sqlContext, hTable, fields);
+        // Dataset<Row> dataFrame = CacheUtil.getInstance().getCacheData(hTable);
+        // if (ObjectUtils.isEmpty(dataFrame)) {
+        //     dataFrame = getData(sparkContext, sqlContext, hTable, fields);
+        // }
         dataFrame.createOrReplaceTempView(tmpTable);
         Dataset<Row> sql = sqlContext.sql(getSQL(xAxis, yAxis, tmpTable, requestList));
         // transform
         List<String[]> data = new ArrayList<>();
@@ -86,6 +88,69 @@ public class SparkCalc {
         return getHBaseDataAndCache(sparkContext, sqlContext, hTable, fields);
     }
+    public Dataset<Row> getData(JavaSparkContext sparkContext, SQLContext sqlContext, String tableId, List<DatasetTableField> fields) throws Exception {
+        fields.sort((o1, o2) -> {
+            if (o1.getOriginName() == null) {
+                return -1;
+            }
+            if (o2.getOriginName() == null) {
+                return 1;
+            }
+            return o1.getOriginName().compareTo(o2.getOriginName());
+        });
+
+        JavaRDD<String> pairRDD = sparkContext.textFile(data_path + tableId + ".txt");
+
+        JavaRDD<Row> rdd = pairRDD.mapPartitions((FlatMapFunction<java.util.Iterator<String>, Row>) tuple2Iterator -> {
+            List<Row> iterator = new ArrayList<>();
+            while (tuple2Iterator.hasNext()) {
+                String[] items = tuple2Iterator.next().split(";");
+                List<Object> list = new ArrayList<>();
+                for (int i = 0; i < items.length; i++) {
+                    String l = items[i];
+                    DatasetTableField x = fields.get(i);
+                    if (x.getDeType() == 0 || x.getDeType() == 1) {
+                        list.add(l);
+                    } else if (x.getDeType() == 2) {
+                        if (StringUtils.isEmpty(l)) {
+                            l = "0";
+                        }
+                        if (StringUtils.equalsIgnoreCase(l, "Y")) {
+                            l = "1";
+                        }
+                        if (StringUtils.equalsIgnoreCase(l, "N")) {
+                            l = "0";
+                        }
+                        list.add(Long.valueOf(l));
+                    } else if (x.getDeType() == 3) {
+                        if (StringUtils.isEmpty(l)) {
+                            l = "0.0";
+                        }
+                        list.add(Double.valueOf(l));
+                    }
+                }
+                iterator.add(RowFactory.create(list.toArray()));
+            }
+            return iterator.iterator();
+        });
+
+        List<StructField> structFields = new ArrayList<>();
+        // the struct field order must match the column order in the rdd
+        fields.forEach(x -> {
+            if (x.getDeType() == 0 || x.getDeType() == 1) {
+                structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.StringType, true));
+            } else if (x.getDeType() == 2) {
+                structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.LongType, true));
+            } else if (x.getDeType() == 3) {
+                structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.DoubleType, true));
+            }
+        });
+        StructType structType = DataTypes.createStructType(structFields);
+
+        Dataset<Row> dataFrame = sqlContext.createDataFrame(rdd, structType);
+        return dataFrame;
+    }
     public Dataset<Row> getHBaseDataAndCache(JavaSparkContext sparkContext, SQLContext sqlContext, String hTable, List<DatasetTableField> fields) throws Exception {
         Scan scan = new Scan();
         scan.addFamily(Bytes.toBytes(column_family));
@@ -145,7 +210,7 @@ public class SparkCalc {
         StructType structType = DataTypes.createStructType(structFields);
         Dataset<Row> dataFrame = sqlContext.createDataFrame(rdd, structType).persist(StorageLevel.MEMORY_AND_DISK_SER());
-        CacheUtil.getInstance().addCacheData(hTable, dataFrame);
+        // CacheUtil.getInstance().addCacheData(hTable, dataFrame);
         dataFrame.count();
         return dataFrame;
     }
...
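For reference, the row mapping introduced in getData above reads one semicolon-separated text file per table from data_path and converts each column by its deType: 0/1 stay strings, 2 becomes a Long (empty -> 0, Y -> 1, N -> 0), and 3 becomes a Double (empty -> 0.0). A self-contained sketch of that mapping; the sample line and the deType values are illustrative assumptions, not taken from a real dataset:

// RowMappingDemo.java - illustrates the per-column conversion used by getData.
import java.util.ArrayList;
import java.util.List;

public class RowMappingDemo {
    public static void main(String[] args) {
        int[] deTypes = {0, 2, 3};             // assumed field types: text, long, double
        String line = "beijing;Y;";            // assumed sample row from <tableId>.txt
        String[] items = line.split(";", -1);  // -1 keeps the trailing empty column
        List<Object> row = new ArrayList<>();
        for (int i = 0; i < deTypes.length; i++) {
            String v = i < items.length ? items[i] : "";
            if (deTypes[i] == 0 || deTypes[i] == 1) {
                row.add(v);                               // deType 0/1: keep as string
            } else if (deTypes[i] == 2) {
                if (v.isEmpty()) v = "0";                 // empty -> 0
                if (v.equalsIgnoreCase("Y")) v = "1";     // Y -> 1
                if (v.equalsIgnoreCase("N")) v = "0";     // N -> 0
                row.add(Long.valueOf(v));
            } else if (deTypes[i] == 3) {
                if (v.isEmpty()) v = "0.0";               // empty -> 0.0
                row.add(Double.valueOf(v));
            }
        }
        System.out.println(row);                // prints [beijing, 1, 0.0]
    }
}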