zhu / dataease · Commits

Commit 08a193d4, authored Jun 09, 2021 by taojinlong
feat: delete the kettle files after data has been extracted

Parent 943d753b
Showing 6 changed files with 210 additions and 128 deletions
DataSetTableTaskLogService.java   .../dataease/service/dataset/DataSetTableTaskLogService.java   +1 -0
DataSetTableTaskService.java      .../io/dataease/service/dataset/DataSetTableTaskService.java   +11 -1
ExtractDataService.java           .../java/io/dataease/service/dataset/ExtractDataService.java   +193 -127
messages_en_US.properties         backend/src/main/resources/i18n/messages_en_US.properties      +2 -0
messages_zh_CN.properties         backend/src/main/resources/i18n/messages_zh_CN.properties      +1 -0
messages_zh_TW.properties         backend/src/main/resources/i18n/messages_zh_TW.properties      +2 -0
backend/src/main/java/io/dataease/service/dataset/DataSetTableTaskLogService.java
@@ -69,6 +69,7 @@ public class DataSetTableTaskLogService {
         if (StringUtils.isNotEmpty(datasetTableTaskLog.getTaskId())) {
             criteria.andTaskIdEqualTo(datasetTableTaskLog.getTaskId());
         }
+        example.setOrderByClause("create_time desc");
         return datasetTableTaskLogMapper.selectByExampleWithBLOBs(example);
     }
 }
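The one added line orders results newest-first, which the rest of the commit relies on: callers read only element 0 of the returned list as the most recent completed run. A self-contained sketch of that assumption (illustrative names, not DataEase code):

import java.util.Comparator;
import java.util.List;

// Why "create_time desc" matters: with newest-first ordering, get(0) is the
// latest completed run, which the incremental sync uses as its watermark.
public class LatestLogSketch {
    record TaskLog(long createTime, long startTime) {}

    public static void main(String[] args) {
        List<TaskLog> logs = List.of(
                new TaskLog(1000L, 900L),
                new TaskLog(3000L, 2900L),   // latest run
                new TaskLog(2000L, 1900L));
        TaskLog latest = logs.stream()
                .sorted(Comparator.comparingLong(TaskLog::createTime).reversed())
                .findFirst().orElseThrow();  // mimics ORDER BY create_time DESC + get(0)
        System.out.println("watermark = " + latest.startTime()); // 2900
    }
}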
backend/src/main/java/io/dataease/service/dataset/DataSetTableTaskService.java
@@ -7,6 +7,7 @@ import io.dataease.commons.constants.ScheduleType;
 import io.dataease.controller.request.dataset.DataSetTaskRequest;
 import io.dataease.i18n.Translator;
 import io.dataease.service.ScheduleService;
+import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.lang3.ObjectUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.quartz.CronExpression;

@@ -61,7 +62,16 @@ public class DataSetTableTaskService {
         datasetTableTask.setCreateTime(System.currentTimeMillis());
         // SIMPLE 类型,提前占位
         if (datasetTableTask.getRate().equalsIgnoreCase(ScheduleType.SIMPLE.toString())) {
-            if (extractDataService.updateSyncStatus(dataSetTableService.get(datasetTableTask.getTableId()))) {
+            if (datasetTableTask.getType().equalsIgnoreCase("add_scope")) {
+                DatasetTableTaskLog request = new DatasetTableTaskLog();
+                request.setTableId(datasetTableTask.getTableId());
+                request.setStatus(JobStatus.Completed.name());
+                List<DatasetTableTaskLog> datasetTableTaskLogs = dataSetTableTaskLogService.select(request);
+                if (CollectionUtils.isEmpty(datasetTableTaskLogs)) {
+                    throw new Exception(Translator.get("i18n_not_exec_add_sync"));
+                }
+            }
+            if (extractDataService.updateSyncStatusIsNone(dataSetTableService.get(datasetTableTask.getTableId()))) {
                 throw new Exception(Translator.get("i18n_sync_job_exists"));
             } else {
                 // write log
...
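Taken together, the new pre-checks mean a run-once (SIMPLE) task is rejected up front in two cases: an incremental (add_scope) task with no completed sync to build on, and any task while another sync already occupies the slot. A minimal sketch of that decision order, with hypothetical names (check, completedLogs, alreadyUnderway are not from the commit):

import java.util.List;

// Sketch of the new pre-check order: an incremental task needs a prior
// completed full sync to diff against, and only one sync may hold the
// "underway" slot at a time.
public class TaskGuardSketch {
    static void check(String type, List<String> completedLogs, boolean alreadyUnderway) throws Exception {
        if ("add_scope".equalsIgnoreCase(type) && completedLogs.isEmpty()) {
            throw new Exception("i18n_not_exec_add_sync"); // no completed sync yet
        }
        if (alreadyUnderway) {
            throw new Exception("i18n_sync_job_exists");   // another sync is running
        }
        // ...otherwise the task may be scheduled and a log written
    }

    public static void main(String[] args) throws Exception {
        check("all_scope", List.of(), false);          // OK: full sync needs no history
        check("add_scope", List.of("log-1"), false);   // OK: has a completed run
        try {
            check("add_scope", List.of(), false);      // rejected
        } catch (Exception e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}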
backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java
@@ -130,72 +130,7 @@ public class ExtractDataService {
             "fi\n" +
             "rm -rf %s\n";

-    private String createDorisTablColumnSql(List<DatasetTableField> datasetTableFields) {
-        String Column_Fields = "dataease_uuid varchar(50), `";
-        for (DatasetTableField datasetTableField : datasetTableFields) {
-            Column_Fields = Column_Fields + datasetTableField.getDataeaseName() + "` ";
-            switch (datasetTableField.getDeExtractType()) {
-                case 0:
-                    if (datasetTableField.getSize() > 65533) {
-                        Column_Fields = Column_Fields + "varchar(65533)" + ",`";
-                    } else {
-                        Column_Fields = Column_Fields + "varchar(lenth)".replace("lenth", String.valueOf(datasetTableField.getSize())) + ",`";
-                    }
-                    break;
-                case 1:
-                    Column_Fields = Column_Fields + "varchar(lenth)".replace("lenth", String.valueOf(datasetTableField.getSize())) + ",`";
-                    break;
-                case 2:
-                    Column_Fields = Column_Fields + "bigint(lenth)".replace("lenth", String.valueOf(datasetTableField.getSize())) + ",`";
-                    break;
-                case 3:
-                    Column_Fields = Column_Fields + "DOUBLE" + ",`";
-                    break;
-                case 4:
-                    Column_Fields = Column_Fields + "TINYINT(lenth)".replace("lenth", String.valueOf(datasetTableField.getSize())) + ",`";
-                    break;
-                default:
-                    Column_Fields = Column_Fields + "varchar(lenth)".replace("lenth", String.valueOf(datasetTableField.getSize())) + ",`";
-                    break;
-            }
-        }
-        Column_Fields = Column_Fields.substring(0, Column_Fields.length() - 2);
-        Column_Fields = "(" + Column_Fields + ")\n";
-        return Column_Fields;
-    }
-
-    private void createDorisTable(String dorisTableName, String dorisTablColumnSql) throws Exception {
-        Datasource dorisDatasource = (Datasource) CommonBeanFactory.getBean("DorisDatasource");
-        JdbcProvider jdbcProvider = CommonBeanFactory.getBean(JdbcProvider.class);
-        DatasourceRequest datasourceRequest = new DatasourceRequest();
-        datasourceRequest.setDatasource(dorisDatasource);
-        datasourceRequest.setQuery(creatTableSql.replace("TABLE_NAME", dorisTableName).replace("Column_Fields", dorisTablColumnSql));
-        jdbcProvider.exec(datasourceRequest);
-    }
-
-    private void dropDorisTable(String dorisTableName) {
-        try {
-            Datasource dorisDatasource = (Datasource) CommonBeanFactory.getBean("DorisDatasource");
-            JdbcProvider jdbcProvider = CommonBeanFactory.getBean(JdbcProvider.class);
-            DatasourceRequest datasourceRequest = new DatasourceRequest();
-            datasourceRequest.setDatasource(dorisDatasource);
-            datasourceRequest.setQuery(dropTableSql.replace("TABLE_NAME", dorisTableName));
-            jdbcProvider.exec(datasourceRequest);
-        } catch (Exception ignore) {
-        }
-    }
-
-    private void replaceTable(String dorisTableName) throws Exception {
-        Datasource dorisDatasource = (Datasource) CommonBeanFactory.getBean("DorisDatasource");
-        JdbcProvider jdbcProvider = CommonBeanFactory.getBean(JdbcProvider.class);
-        DatasourceRequest datasourceRequest = new DatasourceRequest();
-        datasourceRequest.setDatasource(dorisDatasource);
-        datasourceRequest.setQuery("ALTER TABLE DORIS_TABLE REPLACE WITH TABLE DORIS_TMP_TABLE PROPERTIES('swap' = 'false');".replace("DORIS_TABLE", dorisTableName).replace("DORIS_TMP_TABLE", DorisTableUtils.dorisTmpName(dorisTableName)));
-        jdbcProvider.exec(datasourceRequest);
-    }
-
-    public synchronized boolean updateSyncStatus(DatasetTable datasetTable) {
+    public synchronized boolean updateSyncStatusIsNone(DatasetTable datasetTable) {
         datasetTable.setSyncStatus(JobStatus.Underway.name());
         DatasetTableExample example = new DatasetTableExample();
         example.createCriteria().andIdEqualTo(datasetTable.getId());
...
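For reference, the column builder removed above (and re-added later in the file) maps deExtractType codes to Doris column types: 0 and 1 become varchar of the field size (0 capped at 65533), 2 becomes bigint, 3 DOUBLE, 4 TINYINT; the literal "lenth" is only a placeholder token that gets replaced, not SQL. A self-contained sketch of the same mapping on sample fields (hypothetical class and data, not DataEase code):

import java.util.LinkedHashMap;
import java.util.Map;

// Reproduces the DDL fragment createDorisTablColumnSql builds for a field list.
public class DorisColumnSqlSketch {
    public static void main(String[] args) {
        Map<String, int[]> fields = new LinkedHashMap<>(); // name -> {deExtractType, size}
        fields.put("city", new int[]{0, 255});
        fields.put("amount", new int[]{3, 0});
        fields.put("cnt", new int[]{2, 20});

        StringBuilder sb = new StringBuilder("dataease_uuid varchar(50), `");
        fields.forEach((name, f) -> {
            sb.append(name).append("` ");
            switch (f[0]) {
                case 2 -> sb.append("bigint(").append(f[1]).append(")");
                case 3 -> sb.append("DOUBLE");
                case 4 -> sb.append("TINYINT(").append(f[1]).append(")");
                default -> sb.append("varchar(").append(Math.min(f[1], 65533)).append(")");
            }
            sb.append(",`");
        });
        String columns = "(" + sb.substring(0, sb.length() - 2) + ")";
        System.out.println(columns);
        // (dataease_uuid varchar(50), `city` varchar(255),`amount` DOUBLE,`cnt` bigint(20))
    }
}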
@@ -213,41 +148,45 @@ public class ExtractDataService {
         }
         DatasetTableTask datasetTableTask = datasetTableTaskMapper.selectByPrimaryKey(taskId);
         boolean isCronJob = (datasetTableTask != null && datasetTableTask.getRate().equalsIgnoreCase(ScheduleType.CRON.toString()));
-        if (updateSyncStatus(datasetTable) && isCronJob) {
+        if (updateSyncStatusIsNone(datasetTable) && isCronJob) {
             LogUtil.info("Skip synchronization task for table : " + datasetTableId);
             return;
         }
         DatasetTableTaskLog datasetTableTaskLog = new DatasetTableTaskLog();
         UpdateType updateType = UpdateType.valueOf(type);
         Datasource datasource = new Datasource();
         try {
             if (context != null) {
                 datasetTable.setQrtzInstance(context.getFireInstanceId());
                 datasetTableMapper.updateByPrimaryKeySelective(datasetTable);
             }
             if (StringUtils.isNotEmpty(datasetTable.getDataSourceId())) {
                 datasource = datasourceMapper.selectByPrimaryKey(datasetTable.getDataSourceId());
             } else {
                 datasource.setType(datasetTable.getType());
             }
             List<DatasetTableField> datasetTableFields = dataSetTableFieldsService.list(DatasetTableField.builder().tableId(datasetTable.getId()).build());
             datasetTableFields.sort((o1, o2) -> {
                 if (o1.getColumnIndex() == null) {
                     return -1;
                 }
                 if (o2.getColumnIndex() == null) {
                     return 1;
                 }
                 return o1.getColumnIndex().compareTo(o2.getColumnIndex());
             });
             String dorisTablColumnSql = createDorisTablColumnSql(datasetTableFields);
             switch (updateType) {
-                // 全量更新
                 case all_scope:
+                    // 全量更新
+                    try {
                         if (datasource.getType().equalsIgnoreCase("excel")) {
                             datasetTableTaskLog = writeDatasetTableTaskLog(datasetTableTaskLog, datasetTableId, null);
-                        } else {
-                        }
+                        }
+                        if (datasetTableTask != null && datasetTableTask.getRate().equalsIgnoreCase(ScheduleType.CRON.toString())) {
+                            datasetTableTaskLog = writeDatasetTableTaskLog(datasetTableTaskLog, datasetTableId, taskId);
+                        }
+                        if (datasetTableTask != null && datasetTableTask.getRate().equalsIgnoreCase(ScheduleType.SIMPLE.toString())) {
+                            datasetTableTaskLog = getDatasetTableTaskLog(datasetTableTaskLog, datasetTableId, taskId);
+                        }
                         createDorisTable(DorisTableUtils.dorisName(datasetTableId), dorisTablColumnSql);
...
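A side note on the comparator kept in this hunk: returning -1 whenever o1.getColumnIndex() is null is not a consistent total order when both indexes are null, which sorts can reject at runtime with "Comparison method violates its general contract". Comparator.nullsFirst expresses the intended ordering safely; a self-contained sketch (hypothetical Field record, assuming getColumnIndex() returns an Integer):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

// Equivalent ordering to the hand-written comparator: columnIndex ascending,
// nulls first, but as a proper total order.
public class FieldSortSketch {
    record Field(String name, Integer columnIndex) {}

    public static void main(String[] args) {
        List<Field> fields = new ArrayList<>(List.of(
                new Field("b", 2), new Field("u", null), new Field("a", 1)));
        fields.sort(Comparator.comparing(Field::columnIndex,
                Comparator.nullsFirst(Comparator.naturalOrder())));
        fields.forEach(f -> System.out.println(f.name())); // u, a, b
    }
}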
@@ -256,18 +195,30 @@ public class ExtractDataService {
                         generateJobFile("all_scope", datasetTable, String.join(",", datasetTableFields.stream().map(DatasetTableField::getDataeaseName).collect(Collectors.toList())));
                         extractData(datasetTable, "all_scope");
                         replaceTable(DorisTableUtils.dorisName(datasetTableId));
-                        datasetTableTaskLog.setStatus(JobStatus.Completed.name());
-                        datasetTableTaskLog.setEndTime(System.currentTimeMillis());
-                        dataSetTableTaskLogService.save(datasetTableTaskLog);
-                        break;
+                        saveSucessLog(datasetTableTaskLog);
+                        updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed);
+                    } catch (Exception e) {
+                        saveErrorLog(datasetTableId, taskId, e);
+                        updateTableStatus(datasetTableId, datasetTable, JobStatus.Error);
+                        dropDorisTable(DorisTableUtils.dorisTmpName(DorisTableUtils.dorisName(datasetTableId)));
+                    } finally {
+                        if (datasetTableTask != null && datasetTableTask.getRate().equalsIgnoreCase(ScheduleType.SIMPLE.toString())) {
+                            datasetTableTask.setRate(ScheduleType.SIMPLE_COMPLETE.toString());
+                            dataSetTableTaskService.update(datasetTableTask);
+                        }
+                        deleteFile("all_scope", datasetTableId);
+                    }
+                    break;
-                // 增量更新
                 case add_scope:
+                    // 增量更新
+                    try {
                         if (datasource.getType().equalsIgnoreCase("excel")) {
                             datasetTableTaskLog = writeDatasetTableTaskLog(datasetTableTaskLog, datasetTableId, null);
+                            generateTransFile("incremental_add", datasetTable, datasource, datasetTableFields, null);
+                            generateJobFile("incremental_add", datasetTable, String.join(",", datasetTableFields.stream().map(DatasetTableField::getDataeaseName).collect(Collectors.toList())));
+                            extractData(datasetTable, "incremental_add");
+                            saveSucessLog(datasetTableTaskLog);
+                            updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed);
                         } else {
                             DatasetTableIncrementalConfig datasetTableIncrementalConfig = dataSetTableService.incrementalConfig(datasetTableId);
                             if (datasetTableIncrementalConfig == null || StringUtils.isEmpty(datasetTableIncrementalConfig.getTableId())) {
...
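The incremental branch that follows substitutes two literal tokens, lastUpdateTime and currentUpdateTime, into the user-supplied SQL before generating the Kettle job. Note that, as the diff renders it, the second replace chains onto the timestamp string rather than onto the SQL, so the currentUpdateTime token may survive in the query; the sketch below shows the presumably intended substitution (hypothetical table and column names, not DataEase code):

// How the incremental SQL template substitution is meant to behave.
public class IncrementalSqlSketch {
    public static void main(String[] args) {
        String template = "SELECT * FROM orders WHERE update_ms > lastUpdateTime AND update_ms <= currentUpdateTime";
        long lastCompletedStart = 1623200000000L;  // start time of the latest completed sync, from logs.get(0)
        long now = System.currentTimeMillis();
        String sql = template
                .replace("lastUpdateTime", Long.toString(lastCompletedStart))
                .replace("currentUpdateTime", Long.toString(now));
        System.out.println(sql);
    }
}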
@@ -276,61 +227,146 @@ public class ExtractDataService {
                             DatasetTableTaskLog request = new DatasetTableTaskLog();
                             request.setTableId(datasetTableId);
                             request.setStatus(JobStatus.Completed.name());
-                            List<DataSetTaskLogDTO> dataSetTaskLogDTOS = dataSetTableTaskLogService.list(request);
-                            if (CollectionUtils.isEmpty(dataSetTaskLogDTOS)) {
+                            List<DatasetTableTaskLog> datasetTableTaskLogs = dataSetTableTaskLogService.select(request);
+                            if (CollectionUtils.isEmpty(datasetTableTaskLogs)) {
                                 return;
                             }
-                            datasetTableTaskLog = writeDatasetTableTaskLog(datasetTableTaskLog, datasetTableId, taskId);
-                            // 增量添加
-                            if (StringUtils.isNotEmpty(datasetTableIncrementalConfig.getIncrementalAdd().replace(" ", ""))) {
-                                String sql = datasetTableIncrementalConfig.getIncrementalAdd().replace(lastUpdateTime, dataSetTaskLogDTOS.get(0).getStartTime().toString()
+                            if (datasetTableTask != null && datasetTableTask.getRate().equalsIgnoreCase(ScheduleType.CRON.toString())) {
+                                datasetTableTaskLog = writeDatasetTableTaskLog(datasetTableTaskLog, datasetTableId, taskId);
+                            }
+                            if (datasetTableTask != null && datasetTableTask.getRate().equalsIgnoreCase(ScheduleType.SIMPLE.toString())) {
+                                datasetTableTaskLog = getDatasetTableTaskLog(datasetTableTaskLog, datasetTableId, taskId);
+                            }
+                            if (StringUtils.isNotEmpty(datasetTableIncrementalConfig.getIncrementalAdd().replace(" ", ""))) {
+                                // 增量添加
+                                String sql = datasetTableIncrementalConfig.getIncrementalAdd().replace(lastUpdateTime, datasetTableTaskLogs.get(0).getStartTime().toString()
                                         .replace(currentUpdateTime, Long.valueOf(System.currentTimeMillis()).toString()));
                                 generateTransFile("incremental_add", datasetTable, datasource, datasetTableFields, sql);
                                 generateJobFile("incremental_add", datasetTable, fetchSqlField(sql, datasource));
                                 extractData(datasetTable, "incremental_add");
                             }
-                            // 增量删除
-                            if (StringUtils.isNotEmpty(datasetTableIncrementalConfig.getIncrementalDelete())) {
-                                String sql = datasetTableIncrementalConfig.getIncrementalDelete().replace(lastUpdateTime, dataSetTaskLogDTOS.get(0).getStartTime().toString()
+                            if (StringUtils.isNotEmpty(datasetTableIncrementalConfig.getIncrementalDelete().replace(" ", ""))) {
+                                // 增量删除
+                                String sql = datasetTableIncrementalConfig.getIncrementalDelete().replace(lastUpdateTime, datasetTableTaskLogs.get(0).getStartTime().toString()
                                         .replace(currentUpdateTime, Long.valueOf(System.currentTimeMillis()).toString()));
                                 generateTransFile("incremental_delete", datasetTable, datasource, datasetTableFields, sql);
                                 generateJobFile("incremental_delete", datasetTable, fetchSqlField(sql, datasource));
                                 extractData(datasetTable, "incremental_delete");
                             }
+                            saveSucessLog(datasetTableTaskLog);
+                            updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed);
                         }
-                        datasetTableTaskLog.setStatus(JobStatus.Completed.name());
-                        datasetTableTaskLog.setEndTime(System.currentTimeMillis());
-                        dataSetTableTaskLogService.save(datasetTableTaskLog);
-                        break;
+                    } catch (Exception e) {
+                        saveErrorLog(datasetTableId, taskId, e);
+                        updateTableStatus(datasetTableId, datasetTable, JobStatus.Error);
+                    } finally {
+                        if (datasetTableTask != null && datasetTableTask.getRate().equalsIgnoreCase(ScheduleType.SIMPLE.toString())) {
+                            datasetTableTask.setRate(ScheduleType.SIMPLE_COMPLETE.toString());
+                            dataSetTableTaskService.update(datasetTableTask);
+                        }
+                        deleteFile("incremental_add", datasetTableId);
+                        deleteFile("incremental_delete", datasetTableId);
+                    }
+                    break;
             }
-            datasetTable.setSyncStatus(JobStatus.Completed.name());
-            DatasetTableExample example = new DatasetTableExample();
-            example.createCriteria().andIdEqualTo(datasetTableId);
-            datasetTableMapper.updateByExampleSelective(datasetTable, example);
-        } catch (Exception e) {
-            e.printStackTrace();
-            LogUtil.error("Extract data error: " + datasetTableId, e);
-            datasetTable.setSyncStatus(JobStatus.Error.name());
-            DatasetTableExample example = new DatasetTableExample();
-            example.createCriteria().andIdEqualTo(datasetTableId);
-            datasetTableMapper.updateByExampleSelective(datasetTable, example);
-            if (updateType.name().equalsIgnoreCase("all_scope")) {
-                dropDorisTable(DorisTableUtils.dorisTmpName(DorisTableUtils.dorisName(datasetTableId)));
-            }
-        } finally {
-            if (datasetTableTask != null && datasetTableTask.getRate().equalsIgnoreCase(ScheduleType.SIMPLE.toString())) {
-                datasetTableTask.setRate(ScheduleType.SIMPLE_COMPLETE.toString());
-                dataSetTableTaskService.update(datasetTableTask);
-            }
-        }
     }

+    private void updateTableStatus(String datasetTableId, DatasetTable datasetTable, JobStatus completed) {
+        datasetTable.setSyncStatus(completed.name());
+        DatasetTableExample example = new DatasetTableExample();
+        example.createCriteria().andIdEqualTo(datasetTableId);
+        datasetTableMapper.updateByExampleSelective(datasetTable, example);
+    }

+    private void saveSucessLog(DatasetTableTaskLog datasetTableTaskLog) {
+        datasetTableTaskLog.setStatus(JobStatus.Completed.name());
+        datasetTableTaskLog.setEndTime(System.currentTimeMillis());
+        dataSetTableTaskLogService.save(datasetTableTaskLog);
+    }

+    private void saveErrorLog(String datasetTableId, String taskId, Exception e) {
+        LogUtil.error("Extract data error: " + datasetTableId, e);
+        DatasetTableTaskLog datasetTableTaskLog = new DatasetTableTaskLog();
+        datasetTableTaskLog.setTableId(datasetTableId);
+        datasetTableTaskLog.setStatus(JobStatus.Underway.name());
+        if (StringUtils.isNotEmpty(taskId)) {
+            datasetTableTaskLog.setTaskId(taskId);
+        }
+        List<DatasetTableTaskLog> datasetTableTaskLogs = dataSetTableTaskLogService.select(datasetTableTaskLog);
+        if (CollectionUtils.isNotEmpty(datasetTableTaskLogs)) {
+            datasetTableTaskLog = datasetTableTaskLogs.get(0);
+            datasetTableTaskLog.setStatus(JobStatus.Error.name());
+            datasetTableTaskLog.setInfo(ExceptionUtils.getStackTrace(e));
+            datasetTableTaskLog.setEndTime(System.currentTimeMillis());
+            dataSetTableTaskLogService.save(datasetTableTaskLog);
+        }
+    }

    ... (createDorisTablColumnSql, createDorisTable, dropDorisTable and replaceTable are re-added here verbatim, moved down from the block removed in the first hunk) ...

     private DatasetTable getDatasetTable(String datasetTableId) {
...
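The full-sync path relies on the three Doris helpers shown above: build the column DDL, load into a temporary table, then swap it in atomically. With PROPERTIES('swap' = 'false') the REPLACE is one-way, so the old data is dropped and readers never see a half-loaded table. A self-contained sketch of the statement sequence (table names are illustrative; the _tmp suffix mirrors what DorisTableUtils.dorisTmpName is assumed to produce):

// Statement sequence behind an all_scope sync against Doris.
public class DorisSwapSketch {
    public static void main(String[] args) {
        String table = "ds_demo";
        String tmp = table + "_tmp"; // assumed tmp-naming convention
        String[] statements = {
                "CREATE TABLE " + tmp + " (...)",                  // createDorisTable
                "-- Kettle job loads extracted rows into " + tmp,  // extractData
                "ALTER TABLE " + table + " REPLACE WITH TABLE " + tmp
                        + " PROPERTIES('swap' = 'false');"         // replaceTable
        };
        for (String s : statements) {
            System.out.println(s);
        }
    }
}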
@@ -724,6 +760,36 @@ public class ExtractDataService {
         return userDefinedJavaClassStep;
     }

+    private void deleteFile(String type, String dataSetTableId) {
+        String transName = null;
+        String jobName = null;
+        switch (type) {
+            case "all_scope":
+                transName = "trans_" + dataSetTableId;
+                jobName = "job_" + dataSetTableId;
+                break;
+            case "incremental_add":
+                transName = "trans_add_" + dataSetTableId;
+                jobName = "job_add_" + dataSetTableId;
+                break;
+            case "incremental_delete":
+                transName = "trans_delete_" + dataSetTableId;
+                jobName = "job_delete_" + dataSetTableId;
+                break;
+            default:
+                break;
+        }
+        try {
+            File file = new File(root_path + jobName + ".kjb");
+            FileUtils.forceDelete(file);
+        } catch (Exception e) {
+        }
+        try {
+            File file = new File(root_path + transName + ".ktr");
+            FileUtils.forceDelete(file);
+        } catch (Exception e) {
+        }
+    }

     public boolean isKettleRunning() {
         try {
             if (!InetAddress.getByName(carte).isReachable(1000)) {
...
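This deleteFile method is the change the commit subject refers to: once a sync finishes (see the finally blocks above), the generated Kettle job (.kjb) and transformation (.ktr) files are removed. A self-contained sketch of the same cleanup using java.nio (the name prefixes mirror deleteFile; the temp directory stands in for the service's root_path):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

// Cleanup of generated Kettle artifacts after a sync run.
public class KettleCleanupSketch {
    public static void main(String[] args) throws IOException {
        Path root = Files.createTempDirectory("kettle"); // stand-in for root_path
        String tableId = "tbl123";
        Path job = root.resolve("job_add_" + tableId + ".kjb");
        Path trans = root.resolve("trans_add_" + tableId + ".ktr");
        Files.createFile(job);
        Files.createFile(trans);

        // deleteIfExists avoids the empty catch blocks needed around forceDelete
        System.out.println("deleted job:   " + Files.deleteIfExists(job));
        System.out.println("deleted trans: " + Files.deleteIfExists(trans));
    }
}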
backend/src/main/resources/i18n/messages_en_US.properties
@@ -252,3 +252,4 @@ i18n_id_or_pwd_error=Invalid ID or password
 i18n_datasource_delete=Data source is delete
 i18n_dataset_delete=Data set is delete
 i18n_chart_delete=Chart is delete
+i18n_not_exec_add_sync=There is no completed synchronization task. Incremental synchronization cannot be performed
\ No newline at end of file
backend/src/main/resources/i18n/messages_zh_CN.properties
@@ -252,3 +252,4 @@ i18n_id_or_pwd_error=无效的ID或密码
 i18n_datasource_delete=当前用到的数据源已被删除
 i18n_dataset_delete=当前用到的数据集已被删除
 i18n_chart_delete=当前用到的视图已被删除
+i18n_not_exec_add_sync=没有已完成的同步任务,无法进行增量同步
backend/src/main/resources/i18n/messages_zh_TW.properties
@@ -254,3 +254,4 @@ i18n_id_or_pwd_error=無效的ID或密碼
 i18n_datasource_delete=當前用到的數據源已被刪除
 i18n_dataset_delete=當前用到的數據集已被刪除
 i18n_chart_delete=當前用到的視圖已被刪除
+i18n_not_exec_add_sync=沒有已經完成的同步任務,無法進行增量同步
\ No newline at end of file