diff --git a/build.gradle b/build.gradle index f419d850..0641d5df 100644 --- a/build.gradle +++ b/build.gradle @@ -36,6 +36,11 @@ buildscript { url "http://maven.aliyun.com/nexus/content/groups/public/" } } + + dependencies { + classpath 'net.sf.proguard:proguard-gradle:6.2.0' + classpath 'org.springframework.boot:spring-boot-gradle-plugin:1.5.15.RELEASE' + } } dependencies { @@ -91,7 +96,7 @@ distributions { fileMode = 0755 } into { "doc" } { - from "web/app/src/main/doc" + from "docs" dirMode = 0755 fileMode = 0644 } @@ -130,20 +135,23 @@ subprojects { configurations.all { resolutionStrategy { - force 'org.springframework:spring-aop:5.1.15.RELEASE' - force 'org.springframework:spring-aspects:5.1.15.RELEASE' - force 'org.springframework:spring-beans:5.1.15.RELEASE' - force 'org.springframework:spring-context:5.1.15.RELEASE' - force 'org.springframework:spring-core:5.1.15.RELEASE' - force 'org.springframework:spring-expressions:5.1.15.RELEASE' - force 'org.springframework:spring-jcl:5.1.15.RELEASE' - force 'org.springframework:spring-jdbc:5.1.15.RELEASE' - force 'org.springframework:spring-orm:5.1.15.RELEASE' - force 'org.springframework:spring-test:5.1.15.RELEASE' - force 'org.springframework:spring-tx:5.1.15.RELEASE' - force 'org.springframework:spring-web:5.1.15.RELEASE' - force 'org.springframework:spring-webmvc:5.1.15.RELEASE' + force 'org.springframework:spring-aop:5.1.18.RELEASE' + force 'org.springframework:spring-aspects:5.1.18.RELEASE' + force 'org.springframework:spring-beans:5.1.18.RELEASE' + force 'org.springframework:spring-context:5.1.18.RELEASE' + force 'org.springframework:spring-core:5.1.18.RELEASE' + force 'org.springframework:spring-expressions:5.1.18.RELEASE' + force 'org.springframework:spring-jcl:5.1.18.RELEASE' + force 'org.springframework:spring-jdbc:5.1.18.RELEASE' + force 'org.springframework:spring-orm:5.1.18.RELEASE' + force 'org.springframework:spring-test:5.1.18.RELEASE' + force 'org.springframework:spring-tx:5.1.18.RELEASE' + force 'org.springframework:spring-web:5.1.18.RELEASE' + force 'org.springframework:spring-webmvc:5.1.18.RELEASE' + force 'org.apache.logging.log4j:log4j-slf4j-impl:2.16.0' } + exclude group: 'log4j', module: 'log4j' + exclude group: 'org.codehaus.jackson', module: 'jackson-mapper-asl' } apply from: "profile.gradle" \ No newline at end of file diff --git a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/TaskResultDao.java b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/TaskResultDao.java index 68b8b2e3..deeccae4 100644 --- a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/TaskResultDao.java +++ b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/TaskResultDao.java @@ -24,14 +24,13 @@ * @author howeye */ public interface TaskResultDao { - /** * Find task result by application id and rule id * @param applicationId * @param ruleId * @return */ - TaskResult findByApplicationIdAndRuleId(String applicationId, Long ruleId); + List findByApplicationAndRule(String applicationId, Long ruleId); /** * Find avg value between create time and rule id @@ -40,7 +39,7 @@ public interface TaskResultDao { * @param ruleId * @return */ - Double findAvgByCreateTimeBetweenAndRuleId(String begin, String end, Long ruleId); + Double findAvgByCreateTimeBetweenAndRule(String begin, String end, Long ruleId); /** * Find task result by application and rule id in @@ -48,6 +47,72 @@ public interface TaskResultDao { * @param ruleIds * @return */ - List findByApplicationIdAndRuleIdIn(String applicationId, 
List ruleIds); + List findByApplicationIdAndRuleIn(String applicationId, List ruleIds); + + /** + * Save file task result. + * @param taskResult + * @return + */ + TaskResult saveTaskResult(TaskResult taskResult); + + /** + * Find rule IDs by rule metric ID. + * @param id + * @param page + * @param size + * @return + */ + List findRuleByRuleMetric(Long id, int page, int size); + + /** + * Find values by rule ID and rule metric ID. + * @param ruleMetricId + * @param page + * @param size + * @return + */ + List findValuesByRuleMetric(long ruleMetricId, int page, int size); + + /** + * Find avg value by rule ID and rule metric ID. + * @param start + * @param end + * @param ruleId + * @param ruleMetricId + * @return + */ + Double findAvgByCreateTimeBetweenAndRuleAndRuleMetric(String start, String end, Long ruleId, Long ruleMetricId); + /** + * Find value. + * @param applicationId + * @param ruleId + * @param ruleMetricId + * @return + */ + TaskResult find(String applicationId, Long ruleId, Long ruleMetricId); + + /** + * Find value. + * @param runDate + * @param ruleId + * @param ruleMetricId + * @return + */ + TaskResult find(Long runDate, Long ruleId, Long ruleMetricId); + + /** + * Count values. + * @param ruleMetricId + * @return + */ + int countValuesByRuleMetric(long ruleMetricId); + + /** + * Count rules. + * @param ruleMetricId + * @return + */ + int countRuleByRuleMetric(Long ruleMetricId); } diff --git a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/TaskResultDaoImpl.java b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/TaskResultDaoImpl.java index d7df8a76..ff069a83 100644 --- a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/TaskResultDaoImpl.java +++ b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/TaskResultDaoImpl.java @@ -19,8 +19,10 @@ import com.webank.wedatasphere.qualitis.dao.TaskResultDao; import com.webank.wedatasphere.qualitis.dao.repository.TaskResultRepository; import com.webank.wedatasphere.qualitis.entity.TaskResult; -import com.webank.wedatasphere.qualitis.dao.TaskResultDao; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; import org.springframework.stereotype.Repository; import java.util.List; @@ -30,22 +32,65 @@ */ @Repository public class TaskResultDaoImpl implements TaskResultDao { - @Autowired private TaskResultRepository resultRepository; @Override - public TaskResult findByApplicationIdAndRuleId(String applicationId, Long ruleId) { + public List findByApplicationAndRule(String applicationId, Long ruleId) { return resultRepository.findByApplicationIdAndRuleId(applicationId, ruleId); } @Override - public Double findAvgByCreateTimeBetweenAndRuleId(String begin, String end, Long ruleId) { - return resultRepository.findAvgByCreateTimeBetweenAndRuleId(begin, end, ruleId); + public Double findAvgByCreateTimeBetweenAndRule(String begin, String end, Long ruleId) { + return resultRepository.findAvgByCreateTimeBetween(begin, end, ruleId); } @Override - public List findByApplicationIdAndRuleIdIn(String applicationId, List ruleIds) { + public List findByApplicationIdAndRuleIn(String applicationId, List ruleIds) { return resultRepository.findByApplicationIdAndRuleIdIn(applicationId, ruleIds); } + + @Override + public TaskResult saveTaskResult(TaskResult taskResult) { + return resultRepository.save(taskResult); + } + + 
@Override + public List findRuleByRuleMetric(Long ruleMetricId, int page, int size) { + Sort sort = new Sort(Sort.Direction.ASC, "id"); + Pageable pageable = PageRequest.of(page, size, sort); + return resultRepository.findRuleByRuleMetricId(ruleMetricId, pageable).getContent(); + } + + @Override + public List findValuesByRuleMetric(long ruleMetricId, int page, int size) { + Sort sort = new Sort(Sort.Direction.DESC, "id"); + Pageable pageable = PageRequest.of(page, size, sort); + return resultRepository.findValuesByRuleAndRuleMetric(ruleMetricId, pageable).getContent(); + } + + @Override + public Double findAvgByCreateTimeBetweenAndRuleAndRuleMetric(String start, String end, Long ruleId, Long ruleMetricId) { + return resultRepository.findAvgByCreateTimeBetween(start, end, ruleId, ruleMetricId); + } + + @Override + public TaskResult find(String applicationId, Long ruleId, Long ruleMetricId) { + return resultRepository.findValue(applicationId, ruleId, ruleMetricId); + } + + @Override + public TaskResult find(Long runDate, Long ruleId, Long ruleMetricId) { + return resultRepository.findWithRunDate(runDate, ruleId, ruleMetricId); + } + + @Override + public int countValuesByRuleMetric(long ruleMetricId) { + return resultRepository.countValuesByRuleMetric(ruleMetricId); + } + + @Override + public int countRuleByRuleMetric(Long ruleMetricId) { + return resultRepository.countRulesByRuleMetric(ruleMetricId); + } } diff --git a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/TaskResultRepository.java b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/TaskResultRepository.java index a8c24bdd..f8f2eb33 100644 --- a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/TaskResultRepository.java +++ b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/TaskResultRepository.java @@ -17,24 +17,23 @@ package com.webank.wedatasphere.qualitis.dao.repository; import com.webank.wedatasphere.qualitis.entity.TaskResult; -import com.webank.wedatasphere.qualitis.entity.TaskResult; +import java.util.List; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; -import java.util.List; - /** * @author howeye */ public interface TaskResultRepository extends JpaRepository { - /** * Find task result by application and ruls * @param applicationId * @param ruleId * @return */ - TaskResult findByApplicationIdAndRuleId(String applicationId, Long ruleId); + List findByApplicationIdAndRuleId(String applicationId, Long ruleId); /** * Find value avg from begin time and end time @@ -44,7 +43,19 @@ public interface TaskResultRepository extends JpaRepository { * @return */ @Query("select avg(value) from TaskResult t where (t.createTime between ?1 and ?2) and t.ruleId = ?3") - Double findAvgByCreateTimeBetweenAndRuleId(String begin, String end, Long ruleId); + Double findAvgByCreateTimeBetween(String begin, String end, Long ruleId); + + + /** + * Find avg value by rule ID and rule metric ID. 
+ * @param begin + * @param end + * @param ruleId + * @param ruleMetricId + * @return + */ + @Query("select avg(value) from TaskResult t where (t.createTime between ?1 and ?2) and t.ruleId = ?3 and (t.ruleMetricId = ?4)") + Double findAvgByCreateTimeBetween(String begin, String end, Long ruleId, Long ruleMetricId); /** * Find task result by application and rule id @@ -53,4 +64,58 @@ public interface TaskResultRepository extends JpaRepository { * @return */ List findByApplicationIdAndRuleIdIn(String applicationId, List ruleIds); + + /** + * Find rule IDs by rule metric ID. + * @param id + * @param pageable + * @return + */ + @Query(value = "SELECT tr.ruleId from TaskResult tr where tr.ruleMetricId = ?1") + Page findRuleByRuleMetricId(Long id, Pageable pageable); + + /** + * Find values by rule ID and rule metric ID. + * @param ruleMetricId + * @param pageable + * @return + */ + @Query(value = "SELECT tr from TaskResult tr where tr.ruleMetricId = ?1") + Page findValuesByRuleAndRuleMetric(long ruleMetricId, Pageable pageable); + + /** + * Find value. + * @param applicationId + * @param ruleId + * @param ruleMetricId + * @return + */ + @Query(value = "SELECT tr from TaskResult tr where tr.applicationId = ?1 and tr.ruleId = ?2 and tr.ruleMetricId = ?3") + TaskResult findValue(String applicationId, Long ruleId, Long ruleMetricId); + + /** + * Find value with run date. + * @param runDate + * @param ruleId + * @param ruleMetricId + * @return + */ + @Query(value = "SELECT tr from TaskResult tr where tr.runDate = ?1 and tr.ruleId = ?2 and tr.ruleMetricId = ?3") + TaskResult findWithRunDate(Long runDate, Long ruleId, Long ruleMetricId); + + /** + * Count values. + * @param ruleMetricId + * @return + */ + @Query(value = "SELECT count(tr.id) from TaskResult tr where tr.ruleMetricId = ?1") + int countValuesByRuleMetric(long ruleMetricId); + + /** + * Count rules. 
+ * @param ruleMetricId + * @return + */ + @Query(value = "SELECT count(tr.ruleId) from TaskResult tr where tr.ruleMetricId = ?1") + int countRulesByRuleMetric(Long ruleMetricId); } diff --git a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/TaskResult.java b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/TaskResult.java index 8e45cd6b..b17ba9ae 100644 --- a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/TaskResult.java +++ b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/TaskResult.java @@ -33,11 +33,19 @@ public class TaskResult { private String applicationId; @Column(name = "rule_id") private Long ruleId; - private Double value; + private String value; @Column(name = "result_type") private String resultType; @Column(name = "create_time") private String createTime; + @Column(name = "save_result") + private Boolean saveResult; + @Column(name = "rule_metric_id") + private Long ruleMetricId; + @Column(name = "run_date") + private Long runDate; + @Column(name = "department_code") + private String departmentCode; public TaskResult() { // Default Constructor @@ -67,11 +75,11 @@ public void setRuleId(Long ruleId) { this.ruleId = ruleId; } - public Double getValue() { + public String getValue() { return value; } - public void setValue(Double value) { + public void setValue(String value) { this.value = value; } @@ -90,4 +98,36 @@ public String getCreateTime() { public void setCreateTime(String createTime) { this.createTime = createTime; } + + public Boolean getSaveResult() { + return saveResult; + } + + public void setSaveResult(Boolean saveResult) { + this.saveResult = saveResult; + } + + public Long getRuleMetricId() { + return ruleMetricId; + } + + public void setRuleMetricId(Long ruleMetricId) { + this.ruleMetricId = ruleMetricId; + } + + public Long getRunDate() { + return runDate; + } + + public void setRunDate(Long runDate) { + this.runDate = runDate; + } + + public String getDepartmentCode() { + return departmentCode; + } + + public void setDepartmentCode(String departmentCode) { + this.departmentCode = departmentCode; + } } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/LocalConfig.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/LocalConfig.java new file mode 100644 index 00000000..06cca527 --- /dev/null +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/LocalConfig.java @@ -0,0 +1,38 @@ +package com.webank.wedatasphere.qualitis; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Configuration; + +/** + * @author allenzhou@webank.com + * @date 2021/9/7 16:20 + */ +@Configuration +public class LocalConfig { + /** + * zn_CN or en. + */ + @Value("${front_end.local}") + private String local; + /** + * dev or prod. 
+ */ + @Value("${front_end.center}") + private String center; + + public String getLocal() { + return local; + } + + public void setLocal(String local) { + this.local = local; + } + + public String getCenter() { + return center; + } + + public void setCenter(String center) { + this.center = center; + } +} diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/PermissionDeniedRequestException.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/PermissionDeniedRequestException.java new file mode 100644 index 00000000..c4d29379 --- /dev/null +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/PermissionDeniedRequestException.java @@ -0,0 +1,47 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.exception; + +import com.webank.wedatasphere.qualitis.response.GeneralResponse; + +/** + * @author howeye + */ +public class PermissionDeniedRequestException extends Exception { + private Integer status; + + public PermissionDeniedRequestException(String message) { + super(message); + this.status = 403; + } + + public PermissionDeniedRequestException(String message, Integer status) { + super(message); + this.status = status; + } + public GeneralResponse getResponse() { + return new GeneralResponse<>(this.status + "", getMessage(), null); + } + + public Integer getStatus() { + return status; + } + + public void setStatus(Integer status) { + this.status = status; + } +} diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/UnExpectedRequestException.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/UnExpectedRequestException.java index b7863fbf..68d65d03 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/UnExpectedRequestException.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/UnExpectedRequestException.java @@ -22,13 +22,27 @@ * @author howeye */ public class UnExpectedRequestException extends Exception { + private Integer status; public UnExpectedRequestException(String message) { super(message); + status = 400; + } + + public UnExpectedRequestException(String message, Integer status) { + super(message); + this.status = status; } public GeneralResponse getResponse() { - return new GeneralResponse<>("400", getMessage(), null); + return new GeneralResponse<>(this.status + "", getMessage(), null); } + public Integer getStatus() { + return status; + } + + public void setStatus(Integer status) { + this.status = status; + } } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/mapper/PermissionDeniedUserRequestExceptionMapper.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/mapper/PermissionDeniedUserRequestExceptionMapper.java new file mode 100644 index 00000000..a27eb2e5 --- /dev/null +++ 
b/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/mapper/PermissionDeniedUserRequestExceptionMapper.java @@ -0,0 +1,48 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.webank.wedatasphere.qualitis.exception.mapper; + +import com.webank.wedatasphere.qualitis.exception.PermissionDeniedRequestException; +import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; +import com.webank.wedatasphere.qualitis.parser.LocaleParser; +import javax.ws.rs.core.Response; +import javax.ws.rs.ext.ExceptionMapper; +import javax.ws.rs.ext.Provider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * @author allenzhou + * + * When a PermissionDeniedRequestException is thrown straight out to the front end, the toResponse method implemented by this mapper takes the exception as a parameter, + * processes its message (for example, replacing internationalization placeholders) and then returns the processed response. + */ +@Provider +public class PermissionDeniedUserRequestExceptionMapper implements ExceptionMapper { + + private static final Logger LOGGER = LoggerFactory.getLogger(PermissionDeniedUserRequestExceptionMapper.class); + + @Autowired + private LocaleParser localeParser; + + @Override + public Response toResponse(PermissionDeniedRequestException exception) { + String message = localeParser.replacePlaceHolderByLocale(exception.getMessage(), "en_US"); + LOGGER.warn(message, exception); + return Response.ok(exception.getResponse()).status(exception.getStatus()).build(); + } +}
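For orientation, a minimal usage sketch of the new permission-denied handling (the resource method, service and message key below are hypothetical and not part of this change). A JAX-RS resource throws PermissionDeniedRequestException; because the mapper above is registered as a @Provider, the framework passes the exception to toResponse, which localizes any {&...} placeholder in the message via LocaleParser and answers with the exception's GeneralResponse body and HTTP status (403 unless another status was supplied):

    // Hypothetical resource method -- illustrative names only.
    @GET
    @Path("/rules/{ruleId}")
    public GeneralResponse<RuleDetail> getRule(@PathParam("ruleId") Long ruleId) throws PermissionDeniedRequestException {
        if (!permissionService.canView(loginUser(), ruleId)) {
            // The one-argument constructor defaults the status to 403; the two-argument
            // constructor lets the caller choose a different HTTP status.
            throw new PermissionDeniedRequestException("{&HAS_NO_PERMISSION_TO_VIEW_RULE}");
        }
        return new GeneralResponse<>("200", "Success", ruleService.detail(ruleId));
    }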
diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/mapper/UnExpectedUserRequestExceptionMapper.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/mapper/UnExpectedUserRequestExceptionMapper.java index 53f96908..6feb8998 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/mapper/UnExpectedUserRequestExceptionMapper.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/exception/mapper/UnExpectedUserRequestExceptionMapper.java @@ -13,23 +13,22 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.webank.wedatasphere.qualitis.exception.mapper; -import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; -import com.webank.wedatasphere.qualitis.parser.LocaleParser; import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; import com.webank.wedatasphere.qualitis.parser.LocaleParser; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; - -import javax.ws.rs.core.Response; import javax.ws.rs.ext.ExceptionMapper; +import javax.ws.rs.core.Response; import javax.ws.rs.ext.Provider; /** * @author howeye + * + * When an UnExpectedRequestException is thrown straight out to the front end, the toResponse method implemented by UnExpectedUserRequestExceptionMapper takes the exception as a parameter, + * processes its message (for example, replacing internationalization placeholders) and then returns the processed response. */ @Provider public class UnExpectedUserRequestExceptionMapper implements ExceptionMapper { @@ -43,6 +42,6 @@ public class UnExpectedUserRequestExceptionMapper implements ExceptionMapper getTableByUserAndDb(GetTableByUserAndDbRequest request * @throws MetaDataAcquireFailedException * @throws UnExpectedRequestException */ - String getTableComment(String clusterName, String dbName, String tableName, String userName) + String getTableBasicInfo(String clusterName, String dbName, String tableName, String userName) + throws MetaDataAcquireFailedException, UnExpectedRequestException; + + /** + * Get table by context service ID and DSS node name + * @param request + * @return + * @throws MetaDataAcquireFailedException + * @throws UnExpectedRequestException + */ + DataInfo getTableByCsId(GetUserTableByCsIdRequest request) throws MetaDataAcquireFailedException, UnExpectedRequestException; /** @@ -100,4 +118,252 @@ DataInfo getColumnByUserAndTable(GetColumnByUserAndTableReques List getColumnInfo(String clusterName, String dbName, String tableName, String userName) throws MetaDataAcquireFailedException, UnExpectedRequestException; + /** + * Get column by context service ID and table context key + * @param request + * @return + * @throws MetaDataAcquireFailedException + * @throws UnExpectedRequestException + */ + DataInfo getColumnByCsId(GetUserColumnByCsRequest request) + throws MetaDataAcquireFailedException, UnExpectedRequestException; + + /** + * Get table statistics info. + * @param clusterName + * @param dbName + * @param tableName + * @param user + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + * @throws RestClientException + */ + TableStatisticsInfo getTableStatisticsInfo(String clusterName, String dbName, String tableName, String user) + throws UnExpectedRequestException, MetaDataAcquireFailedException, RestClientException; + + /** + * Get partition statistics info. + * @param clusterName + * @param dbName + * @param tableName + * @param partitionPath + * @param user + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + * @throws RestClientException + */ + PartitionStatisticsInfo getPartitionStatisticsInfo(String clusterName, String dbName, String tableName, String partitionPath, String user) + throws UnExpectedRequestException, MetaDataAcquireFailedException, RestClientException; + + /** + * Check field. + * @param col + * @param cols + * @param mappingCols + * @return + */ + boolean fieldExist(String col, List cols, Map mappingCols); + + /** + * Get all data source types.
+ * @param clusterName + * @param userName + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse getAllDataSourceTypes(String clusterName, String userName) + throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Get data source env. + * @param clusterName + * @param userName + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse getDataSourceEnv(String clusterName, String userName) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Get data source info pageable. + * @param clusterName + * @param userName + * @param page + * @param size + * @param searchName + * @param typeId + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + * @throws UnsupportedEncodingException + */ + GeneralResponse getDataSourceInfoPage(String clusterName, String userName, int page, int size, String searchName, Long typeId) + throws UnExpectedRequestException, MetaDataAcquireFailedException, UnsupportedEncodingException; + + /** + * Get data source versions. + * @param clusterName + * @param userName + * @param dataSourceId + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse getDataSourceVersions(String clusterName, String userName, Long dataSourceId) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Get data source info detail. + * @param clusterName + * @param userName + * @param dataSourceId + * @param versionId + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse getDataSourceInfoDetail(String clusterName, String userName, Long dataSourceId, Long versionId) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Get data source info detail by name. + * @param clusterName + * @param authUser + * @param dataSourceName + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse getDataSourceInfoDetailByName(String clusterName, String authUser, + String dataSourceName) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Get data source key define. + * @param clusterName + * @param userName + * @param keyId + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse getDataSourceKeyDefine(String clusterName, String userName, Long keyId) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Connect to data source. + * @param clusterName + * @param userName + * @param jsonRequest + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse connectDataSource(String clusterName, String userName, String jsonRequest) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Get connect params. + * @param clusterName + * @param authUser + * @param dataSourceId + * @param versionId + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse getDataSourceConnectParams(String clusterName, String authUser, Long dataSourceId, + Long versionId) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Publish data source. 
+ * @param clusterName + * @param userName + * @param dataSourceId + * @param versionId + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse publishDataSource(String clusterName, String userName, Long dataSourceId, Long versionId) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Expire data source. + * @param clusterName + * @param userName + * @param dataSourceId + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse expireDataSource(String clusterName, String userName, Long dataSourceId)throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Modify data source. + * @param clusterName + * @param userName + * @param dataSourceId + * @param jsonRequest + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse modifyDataSource(String clusterName, String userName, Long dataSourceId, String jsonRequest) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Modify data source param. + * @param clusterName + * @param userName + * @param dataSourceId + * @param jsonRequest + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse modifyDataSourceParam(String clusterName, String userName, Long dataSourceId, String jsonRequest) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Create data source param. + * @param clusterName + * @param userName + * @param jsonRequest + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse createDataSource(String clusterName, String userName, String jsonRequest) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Get db by data source. + * @param clusterName + * @param userName + * @param dataSourceId + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + Map getDbsByDataSource(String clusterName, String userName, Long dataSourceId) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Get table by data source. + * @param clusterName + * @param userName + * @param dataSourceId + * @param dbName + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + Map getTablesByDataSource(String clusterName, String userName, Long dataSourceId, String dbName) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Get column by data source. 
+ * @param clusterName + * @param userName + * @param dataSourceId + * @param dbName + * @param tableName + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + DataInfo getColumnsByDataSource(String clusterName, String userName, Long dataSourceId, String dbName, String tableName) throws UnExpectedRequestException, MetaDataAcquireFailedException; } \ No newline at end of file diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/constant/DataMapResponseKeyEnum.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/constant/DataMapResponseKeyEnum.java new file mode 100644 index 00000000..1160ac2a --- /dev/null +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/constant/DataMapResponseKeyEnum.java @@ -0,0 +1,30 @@ +package com.webank.wedatasphere.qualitis.metadata.constant; + +/** + * @author allenzhou@webank.com + * @date 2021/6/4 18:07 + */ +public enum DataMapResponseKeyEnum { + /** + * Linkis response map object key enum. + */ + PROGRESS("progress"), + CODE("code"), + TASK("task"), + DATA("data") + ; + + private String key; + + DataMapResponseKeyEnum(String key) { + this.key = key; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } +} diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/constant/RuleConstraintEnum.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/constant/RuleConstraintEnum.java new file mode 100644 index 00000000..e03f170e --- /dev/null +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/constant/RuleConstraintEnum.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.qualitis.metadata.constant; + +/** + * @author allenzhou + */ + +public enum RuleConstraintEnum { + /** + * For special case solutions. + * CUSTOM_DATABASE_PREFIS: used for custom rule configuration in the context service when no database is available.
+ */ + CUSTOM_DATABASE_PREFIS("linkis_cs_tmp_db"), + DEFAULT_NODENAME("qualitis_node"); + + private String value; + RuleConstraintEnum(String str) { + this.value = str; + } + + public String getValue() { + return value; + } +} diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/exception/MetaDataAcquireFailedException.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/exception/MetaDataAcquireFailedException.java index a0c935de..a9b14ff3 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/exception/MetaDataAcquireFailedException.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/exception/MetaDataAcquireFailedException.java @@ -16,13 +16,34 @@ package com.webank.wedatasphere.qualitis.metadata.exception; +import com.webank.wedatasphere.qualitis.response.GeneralResponse; + /** * @author v_wblwyan * @date 2018-12-07 */ public class MetaDataAcquireFailedException extends Exception { + private Integer status; public MetaDataAcquireFailedException(String message) { super(message); + this.status = 400; + } + + public MetaDataAcquireFailedException(String message, Integer status) { + super(message); + this.status = status; + } + + public Integer getStatus() { + return status; + } + + public void setStatus(Integer status) { + this.status = status; + } + + public GeneralResponse getResponse() { + return new GeneralResponse<>(this.status + "", getMessage(), null); } } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/exception/mapper/MetaDataAcquireFailedExceptionMapper.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/exception/mapper/MetaDataAcquireFailedExceptionMapper.java new file mode 100644 index 00000000..34ff0077 --- /dev/null +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/exception/mapper/MetaDataAcquireFailedExceptionMapper.java @@ -0,0 +1,48 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.webank.wedatasphere.qualitis.metadata.exception.mapper; + +import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; +import com.webank.wedatasphere.qualitis.metadata.exception.MetaDataAcquireFailedException; +import com.webank.wedatasphere.qualitis.parser.LocaleParser; +import javax.ws.rs.core.Response; +import javax.ws.rs.ext.ExceptionMapper; +import javax.ws.rs.ext.Provider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * @author allenzhou + * + * When a MetaDataAcquireFailedException is thrown straight out to the front end, the toResponse method implemented by MetaDataAcquireFailedExceptionMapper takes the exception as a parameter, + * processes its message (for example, replacing internationalization placeholders) and then returns the processed response. + */ +@Provider +public class MetaDataAcquireFailedExceptionMapper implements ExceptionMapper { + + private static final Logger LOGGER = LoggerFactory.getLogger(MetaDataAcquireFailedException.class); + + @Autowired + private LocaleParser localeParser; + + @Override + public Response toResponse(MetaDataAcquireFailedException exception) { + String message = localeParser.replacePlaceHolderByLocale(exception.getMessage(), "en_US"); + LOGGER.warn(message, exception); + return Response.ok(exception.getResponse()).status(exception.getStatus()).build(); + } +}
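To make the metadata-side error handling concrete, a small sketch (the helper method and variable names are illustrative, not part of this change) of how the context-service lookups added to MetaDataClient are expected to be called. A failed Linkis lookup raises MetaDataAcquireFailedException; left uncaught, it reaches MetaDataAcquireFailedExceptionMapper above, which localizes the message and responds with the status stored in the exception (400 unless one was supplied):

    // Hypothetical caller of the MetaDataClient methods declared earlier in this change.
    public DataInfo upstreamTables(MetaDataClient metaDataClient, String clusterName, String csId,
        String nodeName, String loginUser) throws MetaDataAcquireFailedException, UnExpectedRequestException {
        // The request defaults to start index 0 and page size 20 (see GetUserTableByCsIdRequest below).
        GetUserTableByCsIdRequest request = new GetUserTableByCsIdRequest();
        request.setClusterName(clusterName);
        request.setCsId(csId);
        request.setNodeName(nodeName);
        request.setLoginUser(loginUser);
        return metaDataClient.getTableByCsId(request);
    }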
diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetClusterByUserRequest.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetClusterByUserRequest.java index 37e29589..f8c7ab4c 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetClusterByUserRequest.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetClusterByUserRequest.java @@ -20,7 +20,9 @@ * @author howeye */ public class GetClusterByUserRequest { - + /** + * Cluster list in UI + */ private String loginUser; private Integer startIndex; private Integer pageSize; diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetColumnByUserAndTableRequest.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetColumnByUserAndTableRequest.java index c6bd9ab8..03705f8a 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetColumnByUserAndTableRequest.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetColumnByUserAndTableRequest.java @@ -20,7 +20,6 @@ * @author howeye */ public class GetColumnByUserAndTableRequest { - private String loginUser; private Integer startIndex; private Integer pageSize; diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetDbByUserAndClusterRequest.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetDbByUserAndClusterRequest.java index 0662095f..b55adc26 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetDbByUserAndClusterRequest.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetDbByUserAndClusterRequest.java @@ -20,7 +20,6 @@ * @author howeye */ public class GetDbByUserAndClusterRequest { - private String loginUser; private Integer startIndex; private Integer pageSize; diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetTableByUserAndDbRequest.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetTableByUserAndDbRequest.java index fcdda31c..b23e4cbb 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetTableByUserAndDbRequest.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetTableByUserAndDbRequest.java @@ -20,7 +20,6 @@ * @author howeye */ public class GetTableByUserAndDbRequest { - private String loginUser; private Integer startIndex; private Integer pageSize; diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserColumnByCsRequest.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserColumnByCsRequest.java new file mode 100644 index 00000000..8a9e20d2 --- /dev/null +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserColumnByCsRequest.java @@ -0,0 +1,96 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.metadata.request; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * Encapsulates the request for querying upstream table columns through the interface provided by DataSphereStudio. + * cs: contains the context service ID and the table's context key. + * + * @author allenzhou + */ +public class GetUserColumnByCsRequest { + private static final int DEFAULT_START_INDEX = 0; + private static final int DEFAULT_PAGE_SIZE = 20; + + @JsonProperty("start_index") + private Integer startIndex; + @JsonProperty("page_size") + private Integer pageSize; + @JsonProperty("cs_id") + private String csId; + @JsonProperty("context_key") + private String contextKey; + @JsonProperty("cluster_name") + private String clusterName; + + private String loginUser; + + public GetUserColumnByCsRequest() { + startIndex = DEFAULT_START_INDEX; + pageSize = DEFAULT_PAGE_SIZE; + } + + public Integer getStartIndex() { + return startIndex; + } + + public void setStartIndex(Integer startIndex) { + this.startIndex = startIndex; + } + + public Integer getPageSize() { + return pageSize; + } + + public void setPageSize(Integer pageSize) { + this.pageSize = pageSize; + } + + public String getCsId() { + return csId; + } + + public void setCsId(String csId) { + this.csId = csId; + } + + public String getContextKey() { + return contextKey; + } + + public void setContextKey(String contextKey) { + this.contextKey = contextKey; + } + + public String getClusterName() { + return clusterName; + } + + public void setClusterName(String clusterName) { + this.clusterName = clusterName; + } + + public String getLoginUser() { + return loginUser; + } + + public void setLoginUser(String loginUser) { + this.loginUser = loginUser; + } +} diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserTableByCsIdRequest.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserTableByCsIdRequest.java new file mode 100644 index 00000000..f8a77a04 --- /dev/null +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserTableByCsIdRequest.java @@ -0,0 +1,73 @@ +package com.webank.wedatasphere.qualitis.metadata.request; + 
+import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * @author allenzhou + */ +public class GetUserTableByCsIdRequest { + private static final int DEFAULT_START_INDEX = 0; + private static final int DEFAULT_PAGE_SIZE = 20; + + @JsonProperty("cs_id") + private String csId; + @JsonProperty("node_name") + private String nodeName; + @JsonProperty("cluster_name") + private String clusterName; + @JsonProperty("start_index") + private Integer startIndex; + @JsonProperty("page_size") + private Integer pageSize; + + private String loginUser; + + public GetUserTableByCsIdRequest() { + startIndex = DEFAULT_START_INDEX; + pageSize = DEFAULT_PAGE_SIZE; + } + + public Integer getStartIndex() { + return startIndex; + } + + public void setStartIndex(Integer startIndex) { + this.startIndex = startIndex; + } + + public Integer getPageSize() { + return pageSize; + } + + public void setPageSize(Integer pageSize) { + this.pageSize = pageSize; + } + + public String getCsId() { + return csId; + } + + public void setCsId(String csId) { + this.csId = csId; + } + + public String getNodeName() { return nodeName; } + + public void setNodeName(String nodeName) { this.nodeName = nodeName; } + + public String getClusterName() { + return clusterName; + } + + public void setClusterName(String clusterName) { + this.clusterName = clusterName; + } + + public String getLoginUser() { + return loginUser; + } + + public void setLoginUser(String loginUser) { + this.loginUser = loginUser; + } +} diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/DataInfo.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/DataInfo.java index b9e8cf03..5446069d 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/DataInfo.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/DataInfo.java @@ -22,7 +22,6 @@ * @author howeye */ public class DataInfo { - private int totalCount; private List content; diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/cluster/ClusterInfoDetail.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/cluster/ClusterInfoDetail.java index efe6c36c..867003f8 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/cluster/ClusterInfoDetail.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/cluster/ClusterInfoDetail.java @@ -22,7 +22,6 @@ * @author howeye */ public class ClusterInfoDetail { - @JsonProperty("source_type") private String sourceType; @JsonProperty("cluster_name") diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/ClusterMappingDetail.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/cluster/ClusterMappingDetail.java similarity index 94% rename from core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/ClusterMappingDetail.java rename to core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/cluster/ClusterMappingDetail.java index b538787d..d399e891 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/ClusterMappingDetail.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/cluster/ClusterMappingDetail.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package com.webank.wedatasphere.qualitis.metadata.response; +package com.webank.wedatasphere.qualitis.metadata.response.cluster; import java.util.HashMap; import java.util.Map; @@ -23,7 +23,6 @@ * @author howeye */ public class ClusterMappingDetail { - private Map clusterType; public ClusterMappingDetail() { diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/column/ColumnInfoDetail.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/column/ColumnInfoDetail.java index d3773423..3e6ee1e4 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/column/ColumnInfoDetail.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/column/ColumnInfoDetail.java @@ -22,7 +22,6 @@ * @author howeye */ public class ColumnInfoDetail { - @JsonProperty("column_name") private String fieldName; @JsonProperty("data_type") @@ -31,7 +30,7 @@ public class ColumnInfoDetail { * Extended attributes, displayed as data source rule query. */ @JsonProperty("column_length") - private int columnLen; + private Integer columnLen; @JsonProperty("column_alias") private String columnAlias; @JsonProperty("column_comment") @@ -40,6 +39,8 @@ public class ColumnInfoDetail { private Boolean isPrimary; @JsonProperty("is_partition") private Boolean isPartitionField; + @JsonProperty("rule_count") + private Integer ruleCount; public ColumnInfoDetail() { // Default Constructor @@ -66,11 +67,11 @@ public void setDataType(String dataType) { this.dataType = dataType; } - public int getColumnLen() { + public Integer getColumnLen() { return columnLen; } - public void setColumnLen(int columnLen) { + public void setColumnLen(Integer columnLen) { this.columnLen = columnLen; } @@ -105,4 +106,20 @@ public Boolean getPartitionField() { public void setPartitionField(Boolean partitionField) { isPartitionField = partitionField; } + + public Integer getRuleCount() { + return ruleCount; + } + + public void setRuleCount(Integer ruleCount) { + this.ruleCount = ruleCount; + } + + @Override + public String toString() { + return "ColumnInfoDetail{" + + "fieldName='" + fieldName + '\'' + + ", dataType='" + dataType + '\'' + + '}'; + } } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/db/DbInfoDetail.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/db/DbInfoDetail.java index 4f7605bb..ae1c1312 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/db/DbInfoDetail.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/db/DbInfoDetail.java @@ -22,7 +22,6 @@ * @author howeye */ public class DbInfoDetail { - @JsonProperty("db_name") private String dbName; @@ -41,5 +40,4 @@ public String getDbName() { public void setDbName(String dbName) { this.dbName = dbName; } - } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/CsTableInfoDetail.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/CsTableInfoDetail.java index 76c3dbf5..c0dab511 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/CsTableInfoDetail.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/CsTableInfoDetail.java @@ -6,7 +6,6 @@ * @author allenzhou */ public class CsTableInfoDetail { - @JsonProperty("table_name") private String tableName; 
@JsonProperty("context_Key") diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/PartitionStatisticsInfo.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/PartitionStatisticsInfo.java new file mode 100644 index 00000000..0a44219d --- /dev/null +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/PartitionStatisticsInfo.java @@ -0,0 +1,61 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.metadata.response.table; + +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; +import java.util.Map; + +/** + * @author allenzhou + */ +public class PartitionStatisticsInfo { + @JsonProperty("partition_size") + private String partitionSize; + @JsonProperty("partition_child_count") + private int partitionChildCount; + @JsonProperty("partitions") + private List partitions; + + public PartitionStatisticsInfo() { + // Default Constructor + } + + public String getPartitionSize() { + return partitionSize; + } + + public void setPartitionSize(String partitionSize) { + this.partitionSize = partitionSize; + } + + public int getPartitionChildCount() { + return partitionChildCount; + } + + public void setPartitionChildCount(int partitionChildCount) { + this.partitionChildCount = partitionChildCount; + } + + public List getPartitions() { + return partitions; + } + + public void setPartitions(List partitions) { + this.partitions = partitions; + } +} diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/TableInfoDetail.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/TableInfoDetail.java index 02d1759b..723e5243 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/TableInfoDetail.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/TableInfoDetail.java @@ -22,7 +22,6 @@ * @author howeye */ public class TableInfoDetail { - @JsonProperty("table_name") private String tableName; @JsonProperty("table_desc") diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/TableStatisticsInfo.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/TableStatisticsInfo.java new file mode 100644 index 00000000..023fb057 --- /dev/null +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/table/TableStatisticsInfo.java @@ -0,0 +1,61 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.metadata.response.table; + +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; +import java.util.Map; + +/** + * @author allenzhou + */ +public class TableStatisticsInfo { + @JsonProperty("table_size") + private String tableSize; + @JsonProperty("table_file_count") + private int tableFileCount; + @JsonProperty("partitions") + private List partitions; + + public TableStatisticsInfo() { + // Default Constructor + } + + public String getTableSize() { + return tableSize; + } + + public void setTableSize(String tableSize) { + this.tableSize = tableSize; + } + + public int getTableFileCount() { + return tableFileCount; + } + + public void setTableFileCount(int tableFileCount) { + this.tableFileCount = tableFileCount; + } + + public List getPartitions() { + return partitions; + } + + public void setPartitions(List partitions) { + this.partitions = partitions; + } +} diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/parser/HiveSqlParser.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/parser/HiveSqlParser.java index 15698370..394cb88e 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/parser/HiveSqlParser.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/parser/HiveSqlParser.java @@ -28,11 +28,11 @@ * @author howeye */ public class HiveSqlParser { + private static final Integer DB_AND_TABLE_LENGTH = 2; private Map> dbAndTableMap = new HashMap<>(); private static final Logger LOGGER = LoggerFactory.getLogger(HiveSqlParser.class); - private static final Integer DB_AND_TABLE_LENGTH = 2; public Map> checkSelectSqlAndGetDbAndTable(String sql) throws ParseException, SemanticException { ParseDriver pd = new ParseDriver(); @@ -96,8 +96,4 @@ private void getAndSaveTable(ASTNode node) throws HiveSqlParseException { } } - public static void main(String[] args) throws SemanticException, ParseException { - HiveSqlParser hiveSqlParser = new HiveSqlParser(); - System.out.println(hiveSqlParser.checkSelectSqlAndGetDbAndTable("select a from db1.table1 where ds=10")); - } } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/parser/LocaleParser.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/parser/LocaleParser.java index 4bef872f..87b3f58b 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/parser/LocaleParser.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/parser/LocaleParser.java @@ -16,6 +16,7 @@ package com.webank.wedatasphere.qualitis.parser; +import com.webank.wedatasphere.qualitis.LocalConfig; import org.apache.commons.lang.LocaleUtils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; @@ -33,35 +34,46 @@ */ @Component public class LocaleParser { - private static final Pattern KEY_WORD_PATTERN = Pattern.compile("\\{&.*?}"); - private static final String ZH_CN_1 = "zh-CN"; - private static final String ZH_CN_2 = "zh_CN"; + private static final String EN = "en"; + private static final Logger LOGGER = 
LoggerFactory.getLogger(LocaleParser.class); @Autowired private MessageSource messageSource; + @Autowired + private LocalConfig localConfig; public String replacePlaceHolderByLocale(String message, String localeStr) { + if (StringUtils.isBlank(message)) { + return ""; + } Locale locale; try { - locale = LocaleUtils.toLocale(localeStr); + if (StringUtils.isNotBlank(localeStr)) { + locale = LocaleUtils.toLocale(localeStr); + } else { + if (localConfig.getLocal() != null) { + if (localConfig.getLocal().equals(EN)) { + locale = Locale.US; + } else { + locale = Locale.CHINA; + } + } else { + locale = Locale.CHINA; + } + } } catch (Exception e) { - LOGGER.warn("Failed to get locale: {}, set according to actual parameters", localeStr.replace("\r", "").replace("\n", "")); - if (ZH_CN_1.equals(localeStr) || ZH_CN_2.equals(localeStr)) { - locale = Locale.CHINA; + if (localConfig.getLocal() != null) { + if (localConfig.getLocal().equals(EN)) { + locale = Locale.US; + } else { + locale = Locale.CHINA; + } } else { - locale = Locale.US; + locale = Locale.CHINA; } } - if (StringUtils.isBlank(localeStr)) { - LOGGER.warn("Failed to get locale: {}, set to default en_US", localeStr.replace("\r", "").replace("\n", "")); - locale = Locale.US; - } - if (!locale.equals(Locale.US) && !locale.equals(Locale.CHINA)) { - LOGGER.warn("Does not support locale: {}, set to default en_US", localeStr.replace("\r", "").replace("\n", "")); - locale = Locale.US; - } Matcher m = KEY_WORD_PATTERN.matcher(message); try { while (m.find()) { diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/response/GeneralResponse.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/response/GeneralResponse.java index 4b92e945..2652989e 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/response/GeneralResponse.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/response/GeneralResponse.java @@ -22,7 +22,6 @@ * @author howeye */ public class GeneralResponse implements Serializable { - private String code; private String message; private T data; diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityJob.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityJob.java index 0c303096..f1970408 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityJob.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityJob.java @@ -23,9 +23,10 @@ * @author howeye */ public class DataQualityJob { - private List jobCode; private Long taskId; + private String user; + private String startupParam; public DataQualityJob() { jobCode = new ArrayList<>(); @@ -39,6 +40,14 @@ public void setJobCode(List jobCode) { this.jobCode = jobCode; } + public String getUser() { + return user; + } + + public void setUser(String user) { + this.user = user; + } + public Long getTaskId() { return taskId; } @@ -47,6 +56,14 @@ public void setTaskId(Long taskId) { this.taskId = taskId; } + public String getStartupParam() { + return startupParam; + } + + public void setStartupParam(String startupParam) { + this.startupParam = startupParam; + } + @Override public String toString() { return "DataQualityJob{" + diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/DpmConfig.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/DpmConfig.java new file mode 100644 index 00000000..bd120d7e --- /dev/null +++ 
b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/DpmConfig.java @@ -0,0 +1,77 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.config; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Configuration; + +/** + * @author allenzhou + */ +@Configuration +public class DpmConfig { + @Value("${linkis.api.meta_data.dpm_server}") + private String datasourceServer; + @Value("${linkis.api.meta_data.dpm_port}") + private Integer datasourcePort; + @Value("${linkis.api.meta_data.dpm_inf}") + private String datasourceInf; + @Value("${linkis.api.meta_data.dpm_systemAppId}") + private String datasourceSystemAppId; + @Value("${linkis.api.meta_data.dpm_systemAppKey}") + private String datasourceSystemAppKey; + + public String getDatasourceServer() { + return datasourceServer; + } + + public void setDatasourceServer(String datasourceServer) { + this.datasourceServer = datasourceServer; + } + + public Integer getDatasourcePort() { + return datasourcePort; + } + + public void setDatasourcePort(Integer datasourcePort) { + this.datasourcePort = datasourcePort; + } + + public String getDatasourceInf() { + return datasourceInf; + } + + public void setDatasourceInf(String datasourceInf) { + this.datasourceInf = datasourceInf; + } + + public String getDatasourceSystemAppId() { + return datasourceSystemAppId; + } + + public void setDatasourceSystemAppId(String datasourceSystemAppId) { + this.datasourceSystemAppId = datasourceSystemAppId; + } + + public String getDatasourceSystemAppKey() { + return datasourceSystemAppKey; + } + + public void setDatasourceSystemAppKey(String datasourceSystemAppKey) { + this.datasourceSystemAppKey = datasourceSystemAppKey; + } +} diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/OptimizationConfig.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/OptimizationConfig.java new file mode 100644 index 00000000..ca30c58b --- /dev/null +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/OptimizationConfig.java @@ -0,0 +1,37 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.qualitis.config; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Configuration; + +/** + * @author allenzhou + */ +@Configuration +public class OptimizationConfig { + @Value("${linkis.lightweight_query}") + private Boolean lightweightQuery; + + public Boolean getLightweightQuery() { + return lightweightQuery; + } + + public void setLightweightQuery(Boolean lightweightQuery) { + this.lightweightQuery = lightweightQuery; + } +} diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/TaskDataSourceConfig.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/TaskDataSourceConfig.java new file mode 100644 index 00000000..0b61d0d6 --- /dev/null +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/TaskDataSourceConfig.java @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.qualitis.config; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Configuration; + +/** + * @author allenzhou + */ +@Configuration +public class TaskDataSourceConfig { + @Value("${task.persistent.password}") + private String password; + + public String getPassword() { + return password; + } + + public void setPassword(String password) { + this.password = password; + } +} diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/AbstractTemplateConverter.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/AbstractTemplateConverter.java index 058bdbb7..db57baa1 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/AbstractTemplateConverter.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/AbstractTemplateConverter.java @@ -22,33 +22,32 @@ import com.webank.wedatasphere.qualitis.exception.DataQualityTaskException; import com.webank.wedatasphere.qualitis.exception.RuleVariableNotFoundException; import com.webank.wedatasphere.qualitis.exception.RuleVariableNotSupportException; -import com.webank.wedatasphere.qualitis.bean.DataQualityJob; -import com.webank.wedatasphere.qualitis.exception.ConvertException; -import com.webank.wedatasphere.qualitis.exception.DataQualityTaskException; -import com.webank.wedatasphere.qualitis.exception.RuleVariableNotFoundException; -import com.webank.wedatasphere.qualitis.exception.RuleVariableNotSupportException; -import com.webank.wedatasphere.qualitis.bean.DataQualityJob; -import com.webank.wedatasphere.qualitis.bean.DataQualityTask; -import com.webank.wedatasphere.qualitis.exception.ConvertException; -import com.webank.wedatasphere.qualitis.exception.DataQualityTaskException; -import com.webank.wedatasphere.qualitis.exception.RuleVariableNotFoundException; -import com.webank.wedatasphere.qualitis.exception.RuleVariableNotSupportException; +import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; +import java.io.IOException; +import java.util.Date; +import java.util.Map; /** * @author howeye */ public abstract class AbstractTemplateConverter { - - /** - * Convert Task into code that can be executed + * Convert Task into code that can be executed. 
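+     * In addition to the task, the converter receives the execution date, optional spark "set" flags, execution parameters, the run date, the cluster type and the MySQL connection parameters of the rule data sources.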
* @param dataQualityTask + * @param date + * @param setFlag + * @param execParams + * @param runDate + * @param clusterType + * @param dataSourceMysqlConnect * @return * @throws ConvertException * @throws DataQualityTaskException * @throws RuleVariableNotSupportException * @throws RuleVariableNotFoundException + * @throws IOException + * @throws UnExpectedRequestException */ - public abstract DataQualityJob convert(DataQualityTask dataQualityTask) throws ConvertException, DataQualityTaskException, RuleVariableNotSupportException, RuleVariableNotFoundException; - + public abstract DataQualityJob convert(DataQualityTask dataQualityTask, Date date, String setFlag, Map execParams, String runDate, + String clusterType, Map dataSourceMysqlConnect) throws ConvertException, DataQualityTaskException, RuleVariableNotSupportException, RuleVariableNotFoundException, IOException, UnExpectedRequestException; } diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/SqlTemplateConverter.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/SqlTemplateConverter.java index 4a22f667..d9f1b5c7 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/SqlTemplateConverter.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/SqlTemplateConverter.java @@ -19,45 +19,49 @@ import com.webank.wedatasphere.qualitis.bean.DataQualityJob; import com.webank.wedatasphere.qualitis.bean.DataQualityTask; import com.webank.wedatasphere.qualitis.bean.RuleTaskDetail; +import com.webank.wedatasphere.qualitis.config.DpmConfig; +import com.webank.wedatasphere.qualitis.config.OptimizationConfig; +import com.webank.wedatasphere.qualitis.constant.SpecCharEnum; +import com.webank.wedatasphere.qualitis.entity.RuleMetric; import com.webank.wedatasphere.qualitis.exception.ConvertException; import com.webank.wedatasphere.qualitis.exception.DataQualityTaskException; import com.webank.wedatasphere.qualitis.exception.RuleVariableNotFoundException; import com.webank.wedatasphere.qualitis.exception.RuleVariableNotSupportException; +import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; +import com.webank.wedatasphere.qualitis.metadata.constant.RuleConstraintEnum; import com.webank.wedatasphere.qualitis.rule.constant.InputActionStepEnum; import com.webank.wedatasphere.qualitis.rule.constant.RuleTemplateTypeEnum; import com.webank.wedatasphere.qualitis.rule.constant.TemplateInputTypeEnum; -import com.webank.wedatasphere.qualitis.rule.entity.*; -import com.webank.wedatasphere.qualitis.translator.AbstractTranslator; -import com.webank.wedatasphere.qualitis.util.DateExprReplaceUtil; -import com.webank.wedatasphere.qualitis.bean.DataQualityJob; -import com.webank.wedatasphere.qualitis.bean.DataQualityTask; -import com.webank.wedatasphere.qualitis.bean.RuleTaskDetail; -import com.webank.wedatasphere.qualitis.exception.ConvertException; -import com.webank.wedatasphere.qualitis.exception.DataQualityTaskException; -import com.webank.wedatasphere.qualitis.exception.RuleVariableNotFoundException; -import com.webank.wedatasphere.qualitis.exception.RuleVariableNotSupportException; -import com.webank.wedatasphere.qualitis.rule.constant.InputActionStepEnum; -import com.webank.wedatasphere.qualitis.rule.constant.RuleTemplateTypeEnum; -import com.webank.wedatasphere.qualitis.rule.constant.TemplateInputTypeEnum; +import com.webank.wedatasphere.qualitis.rule.entity.AlarmConfig; import 
com.webank.wedatasphere.qualitis.rule.entity.Rule; import com.webank.wedatasphere.qualitis.rule.entity.RuleDataSource; +import com.webank.wedatasphere.qualitis.rule.entity.RuleDataSourceMapping; import com.webank.wedatasphere.qualitis.rule.entity.RuleVariable; +import com.webank.wedatasphere.qualitis.rule.entity.Template; +import com.webank.wedatasphere.qualitis.rule.entity.TemplateMidTableInputMeta; import com.webank.wedatasphere.qualitis.rule.entity.TemplateStatisticsInputMeta; import com.webank.wedatasphere.qualitis.translator.AbstractTranslator; import com.webank.wedatasphere.qualitis.util.DateExprReplaceUtil; -import java.util.regex.Matcher; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - +import java.io.IOException; +import java.text.SimpleDateFormat; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.util.Strings; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; /** * SQL Template Converter, can convert task into sql code @@ -68,30 +72,121 @@ */ @Component public class SqlTemplateConverter extends AbstractTemplateConverter { - @Autowired private AbstractTranslator abstractTranslator; + @Autowired + private OptimizationConfig optimizationConfig; + @Autowired + private DpmConfig dpmConfig; + + /** + * For 2149 template mid input meta special solve. + */ + private static final String EN_LINE_PRIMARY_REPEAT = "Field Replace Null Concat"; + private static final String CN_LINE_PRIMARY_REPEAT = "替换空字段拼接"; - public static final String VARIABLE_NAME_PLACEHOLDER = "${VARIABLE}"; private static final Pattern PLACEHOLDER_PATTERN = Pattern.compile(".*\\$\\{(.*)}.*"); private static final Pattern AGGREGATE_FUNC_PATTERN = Pattern.compile("[a-zA-Z]+\\([0-9a-zA-Z_]+\\)"); - private static final String SPARK_SQL_TEMPLATE_PLACEHOLDER = "${SQL}"; + private static final String SAVE_MID_TABLE_NAME_PLACEHOLDER = "${TABLE_NAME}"; + private static final String SPARK_SQL_TEMPLATE_PLACEHOLDER = "${SQL}"; + public static final String VARIABLE_NAME_PLACEHOLDER = "${VARIABLE}"; private static final String FILTER_PLACEHOLDER = "${filter}"; private static final String FILTER_LEFT_PLACEHOLDER = "${filter_left}"; private static final String FILTER_RIGHT_PLACEHOLDER = "${filter_right}"; - private static final Integer COMMON_RULE = 1; + private static final Integer SINGLE_RULE = 1; private static final Integer CUSTOM_RULE = 2; private static final Integer MUL_SOURCE_RULE = 3; + /** + * Multi table solve. + */ + private static final Long MUL_SOURCE_ACCURACY_TEMPLATE_ID = 17L; + private static final Long MUL_SOURCE_COMMON_TEMPLATE_ID = 19L; + + /** + * Dpm properties. + */ + private static final String DPM = "dpm"; + /** + * Cluster type end with this, it is new links version. 
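+     * For example, a clusterType value ending with "1.0" is treated as the newer Linkis version.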
+ */ + private static final String LINKIS_ONE_VERSION = "1.0"; private static final String SPARK_SQL_TEMPLATE = "val " + VARIABLE_NAME_PLACEHOLDER + " = spark.sql(\"" + SPARK_SQL_TEMPLATE_PLACEHOLDER + "\");"; - private static final String SAVE_MID_TABLE_SENTENCE_TEMPLATE = VARIABLE_NAME_PLACEHOLDER + ".write.saveAsTable(\"" + SAVE_MID_TABLE_NAME_PLACEHOLDER + "\");"; + private static final String SPARK_MYSQL_TEMPLATE = "val " + VARIABLE_NAME_PLACEHOLDER + " = spark.read.format(\"jdbc\").option(\"driver\",\"${JDBC_DRIVER}\").option(\"url\",\"jdbc:mysql://${MYSQL_IP}:${MYSQL_PORT}/\").option(\"user\",\"${MYSQL_USER}\").option(\"password\",\"${MYSQL_PASSWORD}\").option(\"query\",\"${SQL}\").load();"; + private static final String IF_EXIST = "if (spark.catalog.tableExists(\"" + SAVE_MID_TABLE_NAME_PLACEHOLDER + "\")) {"; + private static final String ELSE_EXIST = "} else {"; + private static final String END_EXIST = "}"; + + private static final String SAVE_MID_TABLE_SENTENCE_TEMPLATE_CREATE = VARIABLE_NAME_PLACEHOLDER + ".withColumn(\"qualitis_partition_key\", lit(\"${QUALITIS_PARTITION_KEY}\"))" + + ".write.mode(\"append\").partitionBy(\"qualitis_partition_key\").format(\"hive\").saveAsTable(\"" + SAVE_MID_TABLE_NAME_PLACEHOLDER + "\");"; + private static final String SAVE_MID_TABLE_SENTENCE_TEMPLATE_INSERT_OVERWRITE_PARTITION = VARIABLE_NAME_PLACEHOLDER + ".withColumn(\"qualitis_partition_key\", lit(\"${QUALITIS_PARTITION_KEY}\"))" + + ".write.mode(\"overwrite\").insertInto(\"" + SAVE_MID_TABLE_NAME_PLACEHOLDER + "\");"; + + private static final String FPS_SOURCE = "val {TMP_SOURCE} = \"\"\"{\"path\":\"/apps-data/hadoop/{CLUSTER_TYPE}/{USER}/fps/{FPS_TALBE}{FPS_TYPE}\",\"pathType\":\"hdfs\",\"encoding\":\"utf-8\",\"fieldDelimiter\":\"\",\"hasHeader\":{FILE_HEADER},\"sheet\":\"{SHEET_NAME}\",\"quote\":\"\",\"escapeQuotes\":false}\"\"\""; + private static final String FPS_DESTINATION = "val {TMP_DESTINATION} = \"\"\"{\"database\":\"{FPS_DB}\",\"tableName\":\"{FPS_TALBE}\",\"importData\":false,\"isPartition\":false,\"partition\":\"\",\"partitionValue\":\"\",\"isOverwrite\":true,\"columns\":{COLUMN_LIST}}\"\"\""; + private static final String FPS_IMPORT = "com.webank.wedatasphere.linkis.engine.imexport.LoadData.loadDataToTable(spark,{TMP_SOURCE},{TMP_DESTINATION})"; + private static final String FPS_DROP_TABLE = "spark.sql(\"drop table {FPS_DB}.{FPS_TALBE}\")"; + private static final String FPS_TO_HIVE_WITH_HEADER = "val {DF} =spark.read.option(\"header\", \"true\").option(\"delimiter\", \"{DELIMITER}\").option(\"inferSchema\", \"true\").csv(\"{HDFS_PREFIX}{FPS_FILE_PATH}\")"; + private static final String FPS_FILE_PATH = "/apps-data/hadoop/{CLUSTER_TYPE}/{USER}/fps/"; + private static final String IMPORT_SCHEMA = "import org.apache.spark.sql.types._"; + private static final String CONSTRUCT_SCHEMA = "val {SCHEMA} = new StructType()"; + private static final String CONSTRUCT_FIELD = ".add(\"{FIELD_NAME}\", {FIELD_TYPE}, true)"; + private static final String FPS_TO_HIVE_WITH_SCHEMA = "val {DF} = spark.read.option(\"delimiter\", \"{DELIMITER}\").schema({SCHEMA}).csv(\"{HDFS_PREFIX}{FPS_FILE_PATH}\")"; + private static final String DF_REGISTER = "{DF}.registerTempTable(\"{FILE_NAME}\")"; + + /** + * Common static field. 
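+     * The constants and lookup maps below (file suffix to format, JDBC driver by data source type, supported date formats, Hive to Spark field types) are shared by the generated Scala snippets.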
+ */ + private static final String AND = "and"; + + private static final Map FILE_TYPE_SUFFIX = new HashMap(4){{ + put(".txt","text"); + put(".csv", "csv"); + put(".xlsx", "excel"); + put(".xls", "excel"); + }}; + + private static final Map JDBC_DRIVER = new HashMap(4){{ + put("mysql","com.mysql.jdbc.Driver"); + put("tdsql", "com.mysql.jdbc.Driver"); + put("oracle", "oracle.jdbc.driver.OracleDriver"); + put("sqlserver", "com.microsoft.jdbc.sqlserver.SQLServerDriver"); + }}; + + private static final Map DATE_FORMAT = new HashMap(4){{ + put("1","yyyyMMdd"); + put("2", "yyyy-MM-dd"); + put("3", "yyyy.MM.dd"); + put("4", "yyyy/MM/dd"); + }}; + + private static final Map FIELD_TYPE = new HashMap(12){{ + put("tinyint","ByteType"); + put("smallint", "ShortType"); + put("int","IntegerType"); + put("bigint", "LongType"); + put("double", "DoubleType"); + put("float", "FloatType"); + put("decimal", "DecimalType(38,24)"); + put("string", "StringType"); + put("char", "StringType"); + put("varchar", "StringType"); + put("timestamp", "TimestampType"); + put("date", "DateType"); + }}; private static final Logger LOGGER = LoggerFactory.getLogger(SqlTemplateConverter.class); /** * Convert task into scala code * @param dataQualityTask + * @param date + * @param setFlag + * @param execParams + * @param runDate + * @param clusterType + * @param dataSourceMysqlConnect * @return * @throws ConvertException * @throws DataQualityTaskException @@ -99,7 +194,10 @@ public class SqlTemplateConverter extends AbstractTemplateConverter { * @throws RuleVariableNotFoundException */ @Override - public DataQualityJob convert(DataQualityTask dataQualityTask) throws ConvertException, DataQualityTaskException, RuleVariableNotSupportException, RuleVariableNotFoundException { + public DataQualityJob convert(DataQualityTask dataQualityTask, Date date, String setFlag, Map execParams, String runDate + , String clusterType, Map dataSourceMysqlConnect) + throws ConvertException, DataQualityTaskException, RuleVariableNotSupportException, RuleVariableNotFoundException, IOException, UnExpectedRequestException { + LOGGER.info("Start to convert template to actual code, task: " + dataQualityTask); if (null == dataQualityTask || dataQualityTask.getRuleTaskDetails().isEmpty()) { throw new DataQualityTaskException("Task can not be null or empty"); @@ -108,157 +206,518 @@ public DataQualityJob convert(DataQualityTask dataQualityTask) throws ConvertExc List initSentence = abstractTranslator.getInitSentence(); job.getJobCode().addAll(initSentence); LOGGER.info("Succeed to get init code. codes: " + initSentence); + if (StringUtils.isNotBlank(setFlag)) { + LOGGER.info("Start to solve with set flag. 
Spark set conf string: {}", setFlag); + String[] setStrs = setFlag.split(SpecCharEnum.DIVIDER.getValue()); + for (String str : setStrs) { + job.getJobCode().add("spark.sql(\"set " + str + "\")"); + } + LOGGER.info("Finish to solve with set flag."); + } int count = 0; for (RuleTaskDetail ruleTaskDetail : dataQualityTask.getRuleTaskDetails()) { count++; - if (ruleTaskDetail.getRule().getChildRule() != null) { - String parentMidTableName = ruleTaskDetail.getMidTableName().split(",")[0]; - String childMidTableName = ruleTaskDetail.getMidTableName().split(",")[1]; - ruleTaskDetail.setMidTableName(parentMidTableName); - - List codes = generateSparkSqlByTask(ruleTaskDetail.getRule().getChildRule(), dataQualityTask.getApplicationId(), - childMidTableName, dataQualityTask.getCreateTime(), dataQualityTask.getPartition(), count); - job.getJobCode().addAll(codes); - LOGGER.info("Succeed to convert rule into code. rule_id: {}, rul_name: {}, codes: {}", ruleTaskDetail.getRule().getId(), ruleTaskDetail.getRule().getName(), codes); - count ++; - // Fix variables' sequence when solving child rule of parent rule. Solving widget influence. - count ++; - } - List codes = generateSparkSqlByTask(ruleTaskDetail.getRule(), dataQualityTask.getApplicationId(), - ruleTaskDetail.getMidTableName(), dataQualityTask.getCreateTime(), dataQualityTask.getPartition(), count); + List codes = generateSparkSqlByTask(ruleTaskDetail.getRule(), date, dataQualityTask.getApplicationId(), ruleTaskDetail.getMidTableName() + , dataQualityTask.getCreateTime(), new StringBuffer(dataQualityTask.getPartition()), execParams, count, runDate, dataSourceMysqlConnect); job.getJobCode().addAll(codes); LOGGER.info("Succeed to convert rule into code. rule_id: {}, rul_name: {}, codes: {}", ruleTaskDetail.getRule().getId(), ruleTaskDetail.getRule().getName(), codes); } - LOGGER.info("Succeed to convert all template into actual code"); + LOGGER.info("Succeed to convert all rule into actual scala code."); job.setTaskId(dataQualityTask.getTaskId()); + job.setStartupParam(dataQualityTask.getStartupParam()); return job; } /** * Convert task into scala code * @param rule + * @param date * @param applicationId * @param midTableName * @param createTime * @param partition + * @param execParams * @param count + * @param runDate + * @param dataSourceMysqlConnect * @return * @throws ConvertException * @throws RuleVariableNotSupportException * @throws RuleVariableNotFoundException */ - private List generateSparkSqlByTask(Rule rule, String applicationId, String midTableName, String createTime, String partition, Integer count) throws ConvertException, RuleVariableNotSupportException, RuleVariableNotFoundException { + private List generateSparkSqlByTask(Rule rule, Date date, String applicationId, String midTableName, String createTime, StringBuffer partition + , Map execParams, int count, String runDate, Map dataSourceMysqlConnect) + throws ConvertException, RuleVariableNotSupportException, RuleVariableNotFoundException, UnExpectedRequestException { + List sqlList = new ArrayList<>(); - // Get SQL from template - String templateMidTableAction = rule.getTemplate().getMidTableAction(); + // Collect rule metric and build in save sentence sql. + List ruleMetrics = rule.getAlarmConfigs().stream().map(AlarmConfig::getRuleMetric).distinct().collect(Collectors.toList()); + Map ruleMetricMap = new HashMap<>(ruleMetrics.size()); + if (CollectionUtils.isNotEmpty(ruleMetrics)) { + LOGGER.info("Start to get rule metric for task result save. 
Rule metrics: {}", Arrays.toString(ruleMetrics.toArray())); + for (RuleMetric ruleMetric : ruleMetrics) { + if (ruleMetric != null) { + ruleMetricMap.put(ruleMetric.getName(), ruleMetric.getId()); + } + } + LOGGER.info("Finish to get rule metric for task result save."); + } + // Get SQL from template after remove '\n' + String templateMidTableAction = rule.getTemplate().getMidTableAction().replace("\n"," "); + Map filters = new HashMap<>(2); + if (CUSTOM_RULE.intValue() == rule.getRuleType()) { + templateMidTableAction = customMidTableActionUpdate(rule, templateMidTableAction, date, execParams, partition, ruleMetricMap); + } else if (MUL_SOURCE_RULE.intValue() == rule.getRuleType()) { + templateMidTableAction = multiMidTableActionUpdate(rule, templateMidTableAction, date, filters); + } // Get input meta from template List inputMetaRuleVariables = rule.getRuleVariables().stream().filter( ruleVariable -> ruleVariable.getInputActionStep().equals(InputActionStepEnum.TEMPLATE_INPUT_META.getCode())).collect(Collectors.toList()); - // Change filter - if (rule.getTemplate().getTemplateType().equals(RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode())) { - Set ruleDataSources = rule.getRuleDataSources(); - if (rule.getParentRule() != null) { - ruleDataSources = new HashSet<>(); - Set parentRuleDataSources = rule.getParentRule().getRuleDataSources(); - for (RuleDataSource ruleDataSource : parentRuleDataSources) { - RuleDataSource tmp = new RuleDataSource(ruleDataSource); - if (tmp.getDatasourceIndex() == 0) { - tmp.setDatasourceIndex(1); - } else { - tmp.setDatasourceIndex(0); - } - ruleDataSources.add(tmp); + // If partition is not specified, replace with filter in rule configuration. + if (StringUtils.isBlank(partition.toString())) { + templateMidTableAction = fillPartitionWithRuleConfiguration(partition, rule, templateMidTableAction, inputMetaRuleVariables); + } + // Get dbs and tables + Map dbTableMap = new HashMap<>(4); + // Get mappings + StringBuffer mappings = new StringBuffer(); + StringBuffer realFilter = new StringBuffer(); + // Get SQL From template and replace all replaceholders + String midTableAction = replaceVariable(templateMidTableAction, inputMetaRuleVariables, partition.toString(), realFilter, dbTableMap, mappings, date); + + Set templateStatisticsAction = rule.getTemplate().getStatisticAction(); + Map sourceConnect = new HashMap(8); + Map targetConnect = new HashMap(8); + if (dataSourceMysqlConnect != null && dataSourceMysqlConnect.size() > 0) { + for (RuleDataSource ruleDataSource : rule.getRuleDataSources()) { + Map connectParams = dataSourceMysqlConnect.get(ruleDataSource.getId()); + if (connectParams == null) { + continue; } - } - for (RuleDataSource ruleDataSource : ruleDataSources) { - if (ruleDataSource.getDatasourceIndex().equals(0)) { - String leftFilter = ruleDataSource.getFilter(); - leftFilter = DateExprReplaceUtil.replaceDateExpr(leftFilter); - templateMidTableAction = templateMidTableAction.replace(FILTER_LEFT_PLACEHOLDER, leftFilter); - } else { - String rightFilter = ruleDataSource.getFilter(); - rightFilter = DateExprReplaceUtil.replaceDateExpr(rightFilter); - templateMidTableAction = templateMidTableAction.replace(FILTER_RIGHT_PLACEHOLDER, rightFilter); + if (ruleDataSource.getDatasourceIndex() != null && ruleDataSource.getDatasourceIndex().equals(0)) { + // If mysql sec, decrypt password and user name. 
+ sourceConnect = dataSourceMysqlConnect.get(ruleDataSource.getId()); + } + if (ruleDataSource.getDatasourceIndex() != null && ruleDataSource.getDatasourceIndex().equals(1)) { + // If mysql sec, decrypt password and user name. + targetConnect = dataSourceMysqlConnect.get(ruleDataSource.getId()); } } } - if (StringUtils.isBlank(partition)) { - if (rule.getTemplate().getTemplateType().equals(RuleTemplateTypeEnum.SINGLE_SOURCE_TEMPLATE.getCode())) { - partition = new ArrayList<>(rule.getRuleDataSources()).get(0).getFilter(); - } else if (rule.getTemplate().getTemplateType().equals(RuleTemplateTypeEnum.CUSTOM.getCode())) { - partition = rule.getWhereContent(); - } else if (rule.getTemplate().getTemplateType().equals(RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode())) { - // Replace placeholder - partition = ""; - List filterVariable = inputMetaRuleVariables.stream().filter( - r -> r.getTemplateMidTableInputMeta().getInputType().equals(TemplateInputTypeEnum.CONDITION.getCode())) - .collect(Collectors.toList()); - if (!filterVariable.isEmpty()) { - partition = filterVariable.get(0).getValue(); + sqlList.add("val UUID = java.util.UUID.randomUUID.toString"); + // 跨表规则 + if (RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode().equals(rule.getTemplate().getTemplateType()) && dbTableMap.size() > 0) { + // Import sql function. + sqlList.addAll(getImportSql()); + // Generate UUID. + // Transform original table. + Set columns = new HashSet<>(); + if (rule.getTemplate().getId().longValue() == MUL_SOURCE_ACCURACY_TEMPLATE_ID.longValue()) { + // Get accuracy columns. + columns = rule.getRuleDataSourceMappings().stream().map(RuleDataSourceMapping::getLeftColumnNames) + .map(column -> column.replace("tmp1.", "").replace("tmp2.", "")).collect(Collectors.toSet()); + } + if (rule.getTemplate().getId().longValue() == MUL_SOURCE_COMMON_TEMPLATE_ID.longValue()) { + sqlList.addAll(getCommonTransformSql(dbTableMap, mappings, count, partition.toString(), filters, sourceConnect, targetConnect)); + } else { + sqlList.addAll(getSpecialTransformSql(dbTableMap, count, partition.toString(), filters, Strings.join(columns, ','), sourceConnect, targetConnect)); + if (optimizationConfig.getLightweightQuery()) { + count += 3; } } - } - // Get SQL From template and replace all replaceholders - String midTableAction = replaceVariable(templateMidTableAction, inputMetaRuleVariables, partition); - Set templateStatisticsAction = rule.getTemplate().getStatisticAction(); + sqlList.addAll(getSaveMidTableSentenceSettings()); + sqlList.addAll(getSaveMidTableSentence(midTableName, count, runDate)); + } else { + // Generate select statement and save into hive database + RuleDataSource ruleDataSource = rule.getRuleDataSources().stream().filter(dataSource -> dataSource.getDatasourceIndex() == null).iterator().next(); + Map connParams = dataSourceMysqlConnect.get(ruleDataSource.getId()); + if (connParams != null) { + connParams = dataSourceMysqlConnect.get(ruleDataSource.getId()); + } + + sqlList.addAll(generateSparkSqlAndSaveSentence(midTableAction, midTableName, rule.getTemplate(), count, connParams, runDate)); + count ++; + } - // Generate select statement and save into hive database - sqlList.addAll(generateSparkSqlAndSaveSentence(midTableAction, midTableName, rule.getTemplate().getSaveMidTable(), count)); - count ++; // Generate statistics statement, and save into mysql List statisticsRuleVariables = rule.getRuleVariables().stream().filter( - ruleVariable -> 
ruleVariable.getInputActionStep().equals(InputActionStepEnum.STATISTICS_ARG.getCode())).collect(Collectors.toList()); - sqlList.addAll(saveStatisticAndSaveMySqlSentence(rule.getId(), templateStatisticsAction, applicationId, statisticsRuleVariables, createTime, count)); + ruleVariable -> ruleVariable.getInputActionStep().equals(InputActionStepEnum.STATISTICS_ARG.getCode())).collect(Collectors.toList()); + + sqlList.addAll(saveStatisticAndSaveMySqlSentence(rule.getId(), ruleMetricMap, templateStatisticsAction, applicationId, statisticsRuleVariables + , createTime, count, runDate)); return sqlList; } - private List saveStatisticAndSaveMySqlSentence(Long ruleId, Set templateStatisticsInputMetas, String applicationId, List ruleVariables, - String createTime, Integer count) throws RuleVariableNotSupportException, RuleVariableNotFoundException { - return abstractTranslator.persistenceTranslate(ruleId, templateStatisticsInputMetas, applicationId, ruleVariables, createTime, count); + private String customMidTableActionUpdate(Rule rule, String templateMidTableAction, Date date, Map execParams, + StringBuffer partition, Map ruleMetricMap) throws UnExpectedRequestException { + if (StringUtils.isNotBlank(rule.getCsId())) { + templateMidTableAction = templateMidTableAction.replace(RuleConstraintEnum.CUSTOM_DATABASE_PREFIS.getValue().concat(SpecCharEnum.PERIOD.getValue()), ""); + } + if (StringUtils.isNotBlank(partition.toString())) { + templateMidTableAction = templateMidTableAction.replace("${filter}", partition.toString()); + } else if (StringUtils.isNotBlank(rule.getWhereContent())){ + templateMidTableAction = templateMidTableAction.replace("${filter}", rule.getWhereContent()); + } + for (String key : execParams.keySet()) { + templateMidTableAction = templateMidTableAction.replace("${" + key + "}", execParams.get(key)); + } + templateMidTableAction = DateExprReplaceUtil.replaceRunDate(date, templateMidTableAction); + + Set ruleMetricNames = ruleMetricMap.keySet(); + for (String ruleMetricName : ruleMetricNames) { + String cleanRuleMetricName = ruleMetricName.replace("-", "_"); + templateMidTableAction = templateMidTableAction.replace(ruleMetricName, cleanRuleMetricName); + } + + return templateMidTableAction; + } + + private String multiMidTableActionUpdate(Rule rule, String templateMidTableAction, Date date, Map filters) throws UnExpectedRequestException { + Set ruleDataSources = rule.getRuleDataSources(); + if (rule.getParentRule() != null) { + ruleDataSources = new HashSet<>(); + Set parentRuleDataSources = rule.getParentRule().getRuleDataSources(); + for (RuleDataSource ruleDataSource : parentRuleDataSources) { + RuleDataSource tmp = new RuleDataSource(ruleDataSource); + if (tmp.getDatasourceIndex() == 0) { + tmp.setDatasourceIndex(1); + } else { + tmp.setDatasourceIndex(0); + } + ruleDataSources.add(tmp); + } + } + for (RuleDataSource ruleDataSource : ruleDataSources) { + if (ruleDataSource.getDatasourceIndex().equals(0)) { + String leftFilter = ruleDataSource.getFilter(); + leftFilter = DateExprReplaceUtil.replaceFilter(date, leftFilter); + templateMidTableAction = templateMidTableAction.replace(FILTER_LEFT_PLACEHOLDER, leftFilter); + filters.put("source_table", leftFilter); + } else { + String rightFilter = ruleDataSource.getFilter(); + rightFilter = DateExprReplaceUtil.replaceFilter(date, rightFilter); + templateMidTableAction = templateMidTableAction.replace(FILTER_RIGHT_PLACEHOLDER, rightFilter); + filters.put("target_table", rightFilter); + } + } + return templateMidTableAction; + } + + 
private String fillPartitionWithRuleConfiguration(StringBuffer partition, Rule rule, String templateMidTableAction, List inputMetaRuleVariables) { + if (rule.getTemplate().getTemplateType().equals(RuleTemplateTypeEnum.SINGLE_SOURCE_TEMPLATE.getCode())) { + partition.append(new ArrayList<>(rule.getRuleDataSources()).get(0).getFilter()); + } else if (rule.getTemplate().getTemplateType().equals(RuleTemplateTypeEnum.CUSTOM.getCode())) { + // Replace placeholder. + if (StringUtils.isNotEmpty(rule.getWhereContent())) { + partition.append(rule.getWhereContent()); + } + } else if (rule.getTemplate().getTemplateType().equals(RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode())) { + // Replace placeholder. + partition.delete(0, partition.length()); + List filterVariable = inputMetaRuleVariables.stream().filter( + r -> r.getTemplateMidTableInputMeta().getInputType().equals(TemplateInputTypeEnum.CONDITION.getCode()) + ).collect(Collectors.toList()); + if (!filterVariable.isEmpty()) { + partition.append(filterVariable.get(0).getValue()); + } + } + return templateMidTableAction; + } + + private List getCommonTransformSql(Map dbTableMap, StringBuffer mappings, int count, String filter, Map filters + , Map sourceConnect, Map targetConnect) { + // Solve partition, value, hash value + List transformSql = new ArrayList<>(); + StringBuilder sourceSql = new StringBuilder(); + StringBuilder targetSql = new StringBuilder(); + + sourceSql.append("select *").append(" from ") + .append(dbTableMap.get("source_db")).append(dbTableMap.get("source_table")) + .append(" where ").append(filters.get("source_table")); + targetSql.append("select *").append(" from ") + .append(dbTableMap.get("target_db")).append(dbTableMap.get("target_table")) + .append(" where ").append(filters.get("target_table")); + if (sourceConnect != null && sourceConnect.size() > 0) { + String host = (String) sourceConnect.get("host"); + String port = (String) sourceConnect.get("port"); + String user = (String) sourceConnect.get("username"); + String pwd = (String) sourceConnect.get("password"); + String dataType = (String) sourceConnect.get("dataType"); + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sourceSql.toString()).replace(VARIABLE_NAME_PLACEHOLDER, "originalDF") + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + transformSql.add(str); + } else { + transformSql.add(SPARK_SQL_TEMPLATE.replace(VARIABLE_NAME_PLACEHOLDER, "originalDF").replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sourceSql.toString())); + } + if (targetConnect != null && targetConnect.size() > 0) { + String host = (String) targetConnect.get("host"); + String port = (String) targetConnect.get("port"); + String user = (String) targetConnect.get("username"); + String pwd = (String) targetConnect.get("password"); + String dataType = (String) sourceConnect.get("dataType"); + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, targetSql.toString()).replace(VARIABLE_NAME_PLACEHOLDER, "originalDF_2") + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + transformSql.add(str); + } else { + transformSql.add(SPARK_SQL_TEMPLATE.replace(VARIABLE_NAME_PLACEHOLDER, "originalDF_2").replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, targetSql.toString())); + } + + 
transformSql.add("originalDF.registerTempTable(\"tmp1\")"); + transformSql.add("originalDF_2.registerTempTable(\"tmp2\")"); + String commonJoin = "SELECT tmp1.* FROM tmp1 LEFT JOIN tmp2 ON " + mappings.toString() + " WHERE " + filter; + String variableName1 = getVariableName(count); + String joinSql = "val " + variableName1 + " = spark.sql(\"" + commonJoin + "\")"; + + transformSql.add(joinSql); + return transformSql; + } + + private List getSpecialTransformSql(Map dbTableMap, int count, String filter, Map filters + , String columns, Map sourceConnect, Map targetConnect) { + // Solve partition fields. + List partitionFields = new ArrayList<>(); + if (StringUtils.isNotBlank(filter)) { + filter = filter.toLowerCase().trim(); + if (filter.contains(AND)) { + List subPartition = Arrays.asList(filter.split(AND)); + for (String sub : subPartition) { + String partitionField = sub.trim().substring(0, sub.indexOf("=")); + partitionFields.add(partitionField); + } + } else { + String partitionField = filter.substring(0, filter.indexOf("=")); + partitionFields.add(partitionField); + } + } + + // Solve partition, value, hash value + List transformSql = new ArrayList<>(); + StringBuilder sourceSql = new StringBuilder(); + StringBuilder targetSql = new StringBuilder(); + if (CollectionUtils.isNotEmpty(partitionFields)) { + if (StringUtils.isNotBlank(columns)) { + sourceSql.append("select ").append(columns); + targetSql.append("select ").append(columns); + } else { + sourceSql.append("select *"); + targetSql.append("select *"); + } + sourceSql.append(" from ").append(dbTableMap.get("source_db")).append(dbTableMap.get("source_table")).append(" where ").append(filter); + targetSql.append(" from ").append(dbTableMap.get("target_db")).append(dbTableMap.get("target_table")).append(" where ").append(filter); + } else { + if (StringUtils.isNotBlank(columns)) { + sourceSql.append("select ").append(columns); + targetSql.append("select ").append(columns); + } else { + sourceSql.append("select *"); + targetSql.append("select *"); + } + sourceSql.append(" from ").append(dbTableMap.get("source_db")).append(dbTableMap.get("source_table")).append(" where ").append(filters.get("source_table")); + targetSql.append(" from ").append(dbTableMap.get("target_db")).append(dbTableMap.get("target_table")).append(" where ").append(filters.get("target_table")); + } + if (sourceConnect != null && sourceConnect.size() > 0) { + String host = (String) sourceConnect.get("host"); + String port = (String) sourceConnect.get("port"); + String user = (String) sourceConnect.get("username"); + String pwd = (String) sourceConnect.get("password"); + String dataType = (String) sourceConnect.get("dataType"); + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sourceSql.toString()).replace(VARIABLE_NAME_PLACEHOLDER, "originalDF") + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + transformSql.add(str); + } else { + transformSql.add(SPARK_SQL_TEMPLATE.replace(VARIABLE_NAME_PLACEHOLDER, "originalDF").replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sourceSql.toString())); + } + if (targetConnect != null && targetConnect.size() > 0) { + String host = (String) targetConnect.get("host"); + String port = (String) targetConnect.get("port"); + String user = (String) targetConnect.get("username"); + String pwd = (String) targetConnect.get("password"); + String dataType = (String) 
targetConnect.get("dataType"); + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, targetSql.toString()).replace(VARIABLE_NAME_PLACEHOLDER, "originalDF_2") + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + transformSql.add(str); + } else { + transformSql.add(SPARK_SQL_TEMPLATE.replace(VARIABLE_NAME_PLACEHOLDER, "originalDF_2").replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, targetSql.toString())); + } + // Full line to MD5 with dataframe api transformation. + fuleLineToHashLine(transformSql, partitionFields); + + String variableName1 = getVariableName(count); + if (optimizationConfig.getLightweightQuery()) { + count ++; + String variableName2 = getVariableName(count); + count ++; + String variableName3 = getVariableName(count); + count ++; + String variableName4 = getVariableName(count); + + String joinSql = "val " + variableName1 + " = spark.sql(\"SELECT qualitis_tmp1.qualitis_full_line_hash_value as left_full_hash_line, qualitis_tmp1.qualitis_mul_db_accuracy_num as left_full_line_num, qualitis_tmp2.qualitis_full_line_hash_value as right_full_hash_line, qualitis_tmp2.qualitis_mul_db_accuracy_num as right_full_line_num FROM (SELECT qualitis_full_line_hash_value, count(1) as qualitis_mul_db_accuracy_num FROM md5_table_3 WHERE true group by qualitis_full_line_hash_value) qualitis_tmp1 FULL OUTER JOIN (SELECT qualitis_full_line_hash_value, qualitis_full_line_hash_value, count(1) as qualitis_mul_db_accuracy_num FROM md5_table_4 WHERE true group by qualitis_full_line_hash_value) qualitis_tmp2 ON (qualitis_tmp1.qualitis_full_line_hash_value = qualitis_tmp2.qualitis_full_line_hash_value AND qualitis_tmp1.qualitis_mul_db_accuracy_num = qualitis_tmp2.qualitis_mul_db_accuracy_num) WHERE ( NOT (qualitis_tmp1.qualitis_full_line_hash_value is null AND qualitis_tmp1.qualitis_mul_db_accuracy_num is null) AND (qualitis_tmp2.qualitis_full_line_hash_value is null AND qualitis_tmp2.qualitis_mul_db_accuracy_num is null)) OR ( NOT (qualitis_tmp2.qualitis_full_line_hash_value is null AND qualitis_tmp2.qualitis_mul_db_accuracy_num is null) AND (qualitis_tmp1.qualitis_full_line_hash_value is null AND qualitis_tmp1.qualitis_mul_db_accuracy_num is null))\")"; + transformSql.add(joinSql); + transformSql.add(variableName1 + ".registerTempTable(\"md5_table_5\")"); + + String joinSqlWithLeft = "val " + variableName2 + " = spark.sql(\"\"\"SELECT \"left\" as source, md5_table_3.qualitis_full_line_value as full_line, md5_table_5.left_full_line_num FROM md5_table_3 full outer join md5_table_5 on md5_table_3.qualitis_full_line_hash_value = md5_table_5.left_full_hash_line where md5_table_3.qualitis_full_line_hash_value is not null and md5_table_5.left_full_hash_line is not null\"\"\")"; + String joinSqlWithRight = "val " + variableName3 + " = spark.sql(\"\"\"SELECT \"right\" as source, md5_table_4.qualitis_full_line_value as full_line, md5_table_5.right_full_line_num FROM md5_table_4 full outer join md5_table_5 on md5_table_4.qualitis_full_line_hash_value = md5_table_5.right_full_hash_line where md5_table_4.qualitis_full_line_hash_value is not null and md5_table_5.right_full_hash_line is not null\"\"\")"; + + transformSql.add(joinSqlWithLeft); + transformSql.add(joinSqlWithRight); + transformSql.add("val " + variableName4 + "=" + variableName2 + ".union(" + variableName3 + ")"); + } else { + String joinSql = "val " + variableName1 + " = spark.sql(\"SELECT 
qualitis_tmp1.qualitis_full_line_value as left_full_line, qualitis_tmp1.qualitis_mul_db_accuracy_num as left_full_line_num, qualitis_tmp2.qualitis_full_line_value as right_full_line, qualitis_tmp2.qualitis_mul_db_accuracy_num as right_full_line_num FROM (SELECT qualitis_full_line_value, qualitis_full_line_hash_value, count(1) as qualitis_mul_db_accuracy_num FROM md5_table_3 WHERE true group by qualitis_full_line_value, qualitis_full_line_hash_value) qualitis_tmp1 FULL OUTER JOIN (SELECT qualitis_full_line_value, qualitis_full_line_hash_value, count(1) as qualitis_mul_db_accuracy_num FROM md5_table_4 WHERE true group by qualitis_full_line_value, qualitis_full_line_hash_value) qualitis_tmp2 ON (qualitis_tmp1.qualitis_full_line_hash_value = qualitis_tmp2.qualitis_full_line_hash_value AND qualitis_tmp1.qualitis_mul_db_accuracy_num = qualitis_tmp2.qualitis_mul_db_accuracy_num) WHERE ( NOT (qualitis_tmp1.qualitis_full_line_hash_value is null AND qualitis_tmp1.qualitis_mul_db_accuracy_num is null) AND (qualitis_tmp2.qualitis_full_line_hash_value is null AND qualitis_tmp2.qualitis_mul_db_accuracy_num is null)) OR ( NOT (qualitis_tmp2.qualitis_full_line_hash_value is null AND qualitis_tmp2.qualitis_mul_db_accuracy_num is null) AND (qualitis_tmp1.qualitis_full_line_hash_value is null AND qualitis_tmp1.qualitis_mul_db_accuracy_num is null))\")"; + + transformSql.add(joinSql); + } + + return transformSql; } + private void fuleLineToHashLine(List transformSql, List partitionFields) { + transformSql.add("val fillNullDF = originalDF.na.fill(UUID)"); + transformSql.add("val qualitis_names = fillNullDF.schema.fieldNames"); + transformSql.add("val fileNullWithFullLineWithHashDF = fillNullDF.withColumn(\"qualitis_full_line_value\", to_json(struct($\"*\"))).withColumn(\"qualitis_full_line_hash_value\", md5(to_json(struct($\"*\"))))"); + transformSql.add("val qualitis_names_buffer = qualitis_names.toBuffer"); + + transformSql.add("val fillNullDF_2 = originalDF_2.na.fill(UUID)"); + transformSql.add("val qualitis_names_2 = fillNullDF_2.schema.fieldNames"); + transformSql.add("val fileNullWithFullLineWithHashDF_2 = fillNullDF_2.withColumn(\"qualitis_full_line_value\", to_json(struct($\"*\"))).withColumn(\"qualitis_full_line_hash_value\", md5(to_json(struct($\"*\"))))"); + transformSql.add("val qualitis_names_buffer_2 = qualitis_names_2.toBuffer"); + + for (String partitionField : partitionFields) { + transformSql.add("qualitis_names_buffer -= \"" + partitionField + "\""); + transformSql.add("qualitis_names_buffer_2 -= \"" + partitionField + "\""); + } + transformSql.add("val finalDF = fileNullWithFullLineWithHashDF.drop(qualitis_names_buffer:_*)"); + transformSql.add("val finalDF_2 = fileNullWithFullLineWithHashDF_2.drop(qualitis_names_buffer_2:_*)"); + + transformSql.add("finalDF.registerTempTable(\"md5_table_3\")"); + transformSql.add("finalDF_2.registerTempTable(\"md5_table_4\")"); + } + + private List getImportSql() { + List imports = new ArrayList<>(); + imports.add("import org.apache.spark.sql.types._"); + imports.add("import org.apache.spark.sql.functions._"); + return imports; + } + + private List saveStatisticAndSaveMySqlSentence(Long ruleId, Map ruleMetricIds, + Set templateStatisticsInputMetas, String applicationId, List ruleVariables, + String createTime, Integer count, String runDate) throws RuleVariableNotSupportException, RuleVariableNotFoundException { + return abstractTranslator.persistenceTranslate(ruleId, ruleMetricIds, templateStatisticsInputMetas, applicationId, ruleVariables, createTime + , 
count, runDate); + } /** * Generate scala code of select statement and save into hive database * @param sql * @param saveTableName - * @param saveMidTable + * @param template * @param count + * @param connParams + * @param runDate * @return */ - private List generateSparkSqlAndSaveSentence(String sql, String saveTableName, Boolean saveMidTable, Integer count) { + private List generateSparkSqlAndSaveSentence(String sql, String saveTableName, Template template, Integer count, Map connParams, String runDate) { List sparkSqlList = new ArrayList<>(); - String sparkSqlSentence = getSparkSqlSentence(sql, count); + String sparkSqlSentence; + if (connParams == null) { + sparkSqlSentence = getSparkSqlSentence(sql, count); + } else { + sparkSqlSentence = getSparkSqlSententceWithMysqlConnParams(sql, count, connParams); + } + sparkSqlList.add(sparkSqlSentence); + List midTableInputNames = template.getTemplateMidTableInputMetas().stream().map(TemplateMidTableInputMeta::getName).collect(Collectors.toList()); + + boolean linePrimaryRepeat = CollectionUtils.isNotEmpty(midTableInputNames) && (midTableInputNames.contains(EN_LINE_PRIMARY_REPEAT) || midTableInputNames.contains(CN_LINE_PRIMARY_REPEAT)); + if (linePrimaryRepeat) { + sparkSqlList.add("val fillNullDF_" + count + " = " + getVariableName(count) + ".na.fill(UUID)"); + sparkSqlList.add("val fileNullWithFullLineWithHashDF_" + count + " = fillNullDF_" + count + ".withColumn(\"qualitis_full_line_value\", to_json(struct($\"*\"))).withColumn(\"md5\", md5(to_json(struct($\"*\"))))"); + sparkSqlList.add("fileNullWithFullLineWithHashDF_" + count + ".registerTempTable(\"tmp_table_" + count + "\")"); + sparkSqlList.add("val " + getVariableName(count) + " = spark.sql(\"select md5, count(1) as md5_count from tmp_table_" + count + " group by md5 having count(*) > 1\")"); + } + LOGGER.info("Succeed to generate spark sql. sentence: {}", sparkSqlSentence); // Fix bug in the workflow between widget node and qualitis node. String variableFormer = getVariableName(count); - String str1 = "val schemas = " + variableFormer + ".schema.fields.map(f => f.name).toList"; - String str2 = "val newSchemas = schemas.map(s => s.replaceAll(\"[()]\", \"\")).toList"; - // 后续变量序号加一 count ++; String variableLatter = getVariableName(count); + formatSchema(sparkSqlList, variableFormer, variableLatter); + // Fix bug end. + if (template.getSaveMidTable()) { + sparkSqlList.addAll(getSaveMidTableSentenceSettings()); + sparkSqlList.addAll(getSaveMidTableSentence(saveTableName, count, runDate)); + LOGGER.info("Succeed to generate spark sql. 
sentence."); + } + return sparkSqlList; + } + + private String getSparkSqlSententceWithMysqlConnParams(String sql, Integer count, Map connParams) { + sql = sql.replace("\"", "\\\""); + String host = (String) connParams.get("host"); + String port = (String) connParams.get("port"); + String user = (String) connParams.get("username"); + String pwd = (String) connParams.get("password"); + String dataType = (String) connParams.get("dataType"); + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sql) + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + return str.replace(VARIABLE_NAME_PLACEHOLDER, getVariableName(count)); + } + + private void formatSchema(List sparkSqlList, String variableFormer, String variableLatter) { + String str1 = "val schemas = " + variableFormer + ".schema.fields.map(f => f.name).toList"; + String str2 = "val newSchemas = schemas.map(s => s.replaceAll(\"[()]\", \"\")).toList"; String str3 = "val " + variableLatter + " = " + variableFormer + ".toDF(newSchemas: _*)"; sparkSqlList.add(str1); sparkSqlList.add(str2); sparkSqlList.add(str3); - // - if (saveMidTable) { - String midTableSentence = getSaveMidTableSentence(saveTableName, count); - sparkSqlList.add(midTableSentence); - LOGGER.info("Succeed to generate spark sql. sentence: {}", midTableSentence); - } - return sparkSqlList; } - private String getSaveMidTableSentence(String saveMidTableName, Integer count) { - String tmp = SAVE_MID_TABLE_SENTENCE_TEMPLATE.replace(SAVE_MID_TABLE_NAME_PLACEHOLDER, saveMidTableName); - return tmp.replace(VARIABLE_NAME_PLACEHOLDER, getVariableName(count)); + private List getSaveMidTableSentenceSettings() { + List settings = new ArrayList<>(); + settings.add("spark.sqlContext.setConf(\"hive.exec.dynamic.partition\", \"true\")"); + settings.add("spark.sqlContext.setConf(\"hive.exec.dynamic.partition.mode\", \"nonstrict\")"); + + settings.add("spark.conf.set(\"spark.sql.sources.partitionOverwriteMode\",\"dynamic\")"); + return settings; + } + + private List getSaveMidTableSentence(String saveMidTableName, Integer count, String runDate) { + Date date = new Date(); + SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd"); + List saveSqls = new ArrayList<>(); + saveSqls.add(IF_EXIST.replace(SAVE_MID_TABLE_NAME_PLACEHOLDER, saveMidTableName)); + saveSqls.add(SAVE_MID_TABLE_SENTENCE_TEMPLATE_INSERT_OVERWRITE_PARTITION.replace("${QUALITIS_PARTITION_KEY}", StringUtils.isBlank(runDate) ? format.format(date) : runDate) + .replace(SAVE_MID_TABLE_NAME_PLACEHOLDER, saveMidTableName).replace(VARIABLE_NAME_PLACEHOLDER, getVariableName(count))); + saveSqls.add(ELSE_EXIST); + saveSqls.add(SAVE_MID_TABLE_SENTENCE_TEMPLATE_CREATE.replace("${QUALITIS_PARTITION_KEY}", StringUtils.isBlank(runDate) ? 
format.format(date) : runDate) + .replace(SAVE_MID_TABLE_NAME_PLACEHOLDER, saveMidTableName).replace(VARIABLE_NAME_PLACEHOLDER, getVariableName(count))); + saveSqls.add(END_EXIST); + return saveSqls; } private String getSparkSqlSentence(String sql, Integer count) { @@ -272,16 +731,48 @@ private String getSparkSqlSentence(String sql, Integer count) { * @param template * @param variables * @param filter + * @param realFilter + * @param dbTableMap for pick up source db.table & target db.table + * @param mappings + * @param date * @return * @throws ConvertException */ - private String replaceVariable(String template, List variables, String filter) throws ConvertException { + private String replaceVariable(String template, List variables, String filter, StringBuffer realFilter, Map dbTableMap + , StringBuffer mappings, Date date) + throws ConvertException, UnExpectedRequestException { String sqlAction = template; - filter = DateExprReplaceUtil.replaceDateExpr(filter); - sqlAction = sqlAction.replace(FILTER_PLACEHOLDER, filter); - LOGGER.info("Succeed to replace {} into {}", FILTER_PLACEHOLDER, filter); + if (StringUtils.isNotBlank(filter)) { + String tmpfilter = DateExprReplaceUtil.replaceFilter(date, filter); + sqlAction = sqlAction.replace(FILTER_PLACEHOLDER, tmpfilter); + realFilter.append(tmpfilter); + LOGGER.info("Succeed to replace {} into {}", FILTER_PLACEHOLDER, tmpfilter); + } else { + realFilter.append("true"); + } for (RuleVariable ruleVariable : variables) { - String placeHolder = "\\$\\{" + ruleVariable.getTemplateMidTableInputMeta().getPlaceholder() + "}"; + String midInputMetaPlaceHolder = ruleVariable.getTemplateMidTableInputMeta().getPlaceholder(); + String placeHolder = "\\$\\{" + midInputMetaPlaceHolder + "}"; + // GeT source db and table, target db and table. + if ("source_db".equals(midInputMetaPlaceHolder)) { + if (StringUtils.isNotBlank(ruleVariable.getValue())) { + dbTableMap.put("source_db", ruleVariable.getValue() + "."); + } else { + dbTableMap.put("source_db", ""); + } + } else if ("source_table".equals(midInputMetaPlaceHolder)) { + dbTableMap.put("source_table", ruleVariable.getValue()); + } else if ("target_table".equals(midInputMetaPlaceHolder)) { + dbTableMap.put("target_table", ruleVariable.getValue()); + } else if ("target_db".equals(midInputMetaPlaceHolder)) { + if (StringUtils.isNotBlank(ruleVariable.getValue())) { + dbTableMap.put("target_db", ruleVariable.getValue() + "."); + } else { + dbTableMap.put("target_db", ""); + } + } else if ("mapping_argument".equals(midInputMetaPlaceHolder)) { + mappings.append(ruleVariable.getValue()); + } // Fix issue of wedget node in the front. if ("\\$\\{field}".equals(placeHolder)) { Matcher matcher = AGGREGATE_FUNC_PATTERN.matcher(ruleVariable.getValue()); @@ -293,7 +784,7 @@ private String replaceVariable(String template, List variables, St } } // Fix replacement issue that db is null when running workflow. 
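// For example, "${source_db}.${source_table}" becomes "db1.tab1" when both values are present, and just "tab1" when the db value is blank, because the blank branch strips the placeholder together with the dot that follows it.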
- if ("".equals(ruleVariable.getValue())) { + if (ruleVariable.getValue() == null || "".equals(ruleVariable.getValue())) { sqlAction = sqlAction.replaceAll(placeHolder + ".", ""); } else { sqlAction = sqlAction.replaceAll(placeHolder, ruleVariable.getValue()); @@ -302,7 +793,7 @@ private String replaceVariable(String template, List variables, St LOGGER.info("Succeed to replace {} into {}", placeHolder, ruleVariable.getValue()); } if (PLACEHOLDER_PATTERN.matcher(sqlAction).matches()) { - throw new ConvertException("Unable to convert SQL, replacing placeholders failed"); + throw new ConvertException("Unable to convert SQL, replacing placeholders failed, still having placeholder."); } return sqlAction; diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/ConvertException.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/ConvertException.java index 391d9fc0..05d58e5d 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/ConvertException.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/ConvertException.java @@ -20,7 +20,6 @@ * @author howeye */ public class ConvertException extends Exception { - public ConvertException(String message) { super(message); } diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/DataQualityTaskException.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/DataQualityTaskException.java index 79b4b492..104a853e 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/DataQualityTaskException.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/DataQualityTaskException.java @@ -20,7 +20,6 @@ * @author howeye */ public class DataQualityTaskException extends Exception { - public DataQualityTaskException(String message) { super(message); } diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/RuleVariableNotFoundException.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/RuleVariableNotFoundException.java index a5fcd048..9d27d642 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/RuleVariableNotFoundException.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/RuleVariableNotFoundException.java @@ -20,7 +20,6 @@ * @author howeye */ public class RuleVariableNotFoundException extends Exception { - public RuleVariableNotFoundException(String message) { super(message); } diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/RuleVariableNotSupportException.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/RuleVariableNotSupportException.java index 785a8e9c..7bf94c33 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/RuleVariableNotSupportException.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/RuleVariableNotSupportException.java @@ -20,7 +20,6 @@ * @author howeye */ public class RuleVariableNotSupportException extends Exception { - public RuleVariableNotSupportException(String message) { super(message); } diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/TaskTypeException.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/TaskTypeException.java index 02c0e755..8a214eb2 100644 --- 
a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/TaskTypeException.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/exception/TaskTypeException.java @@ -20,7 +20,6 @@ * @author howeye */ public class TaskTypeException extends Exception { - public TaskTypeException(String message) { super(message); } diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/AbstractTranslator.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/AbstractTranslator.java index 4bc18dc5..ae5f2e69 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/AbstractTranslator.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/AbstractTranslator.java @@ -16,41 +16,40 @@ package com.webank.wedatasphere.qualitis.translator; -import com.webank.wedatasphere.qualitis.exception.RuleVariableNotFoundException; -import com.webank.wedatasphere.qualitis.exception.RuleVariableNotSupportException; -import com.webank.wedatasphere.qualitis.rule.entity.RuleVariable; -import com.webank.wedatasphere.qualitis.rule.entity.TemplateStatisticsInputMeta; import com.webank.wedatasphere.qualitis.exception.RuleVariableNotFoundException; import com.webank.wedatasphere.qualitis.exception.RuleVariableNotSupportException; import com.webank.wedatasphere.qualitis.rule.entity.RuleVariable; import com.webank.wedatasphere.qualitis.rule.entity.TemplateStatisticsInputMeta; +import java.text.ParseException; import java.util.List; +import java.util.Map; import java.util.Set; /** * @author howeye */ public abstract class AbstractTranslator { - - /** - * Generate persistence statement + * Generate persistence statement. * @param ruleId + * @param ruleMetricMaps * @param templateStatisticsInputMetas * @param applicationId * @param ruleVariables * @param createTime * @param count + * @param runDate * @return * @throws RuleVariableNotSupportException * @throws RuleVariableNotFoundException */ - public abstract List persistenceTranslate(Long ruleId, Set templateStatisticsInputMetas, String applicationId, List ruleVariables, - String createTime, Integer count) throws RuleVariableNotSupportException, RuleVariableNotFoundException; + public abstract List persistenceTranslate(Long ruleId, Map ruleMetricMaps, + Set templateStatisticsInputMetas, String applicationId, List ruleVariables, + String createTime, Integer count, String runDate) throws RuleVariableNotSupportException, RuleVariableNotFoundException; /** - * Generate initial statement + * Generate initial statement. 
* @return */ public abstract List getInitSentence(); diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/JdbcTranslator.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/JdbcTranslator.java index 3d955e1c..4d647a14 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/JdbcTranslator.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/JdbcTranslator.java @@ -16,32 +16,33 @@ package com.webank.wedatasphere.qualitis.translator; +import com.webank.wedatasphere.qualitis.config.TaskDataSourceConfig; import com.webank.wedatasphere.qualitis.converter.SqlTemplateConverter; +import com.webank.wedatasphere.qualitis.dao.RuleMetricDao; import com.webank.wedatasphere.qualitis.exception.RuleVariableNotFoundException; import com.webank.wedatasphere.qualitis.exception.RuleVariableNotSupportException; import com.webank.wedatasphere.qualitis.rule.constant.InputActionStepEnum; import com.webank.wedatasphere.qualitis.rule.constant.StatisticsValueTypeEnum; import com.webank.wedatasphere.qualitis.rule.entity.RuleVariable; import com.webank.wedatasphere.qualitis.rule.entity.TemplateStatisticsInputMeta; -import com.webank.wedatasphere.qualitis.converter.SqlTemplateConverter; -import com.webank.wedatasphere.qualitis.exception.RuleVariableNotFoundException; -import com.webank.wedatasphere.qualitis.exception.RuleVariableNotSupportException; -import com.webank.wedatasphere.qualitis.rule.constant.InputActionStepEnum; -import com.webank.wedatasphere.qualitis.rule.constant.StatisticsValueTypeEnum; -import com.webank.wedatasphere.qualitis.rule.entity.RuleVariable; -import com.webank.wedatasphere.qualitis.rule.entity.TemplateStatisticsInputMeta; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Set; +import javax.annotation.PostConstruct; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Configuration; -import javax.annotation.PostConstruct; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Set; - /** * Generate scala code of connecting mysql and save data into mysql * Example: @@ -54,7 +55,6 @@ @Configuration @ConditionalOnProperty(name = "task.persistent.type", havingValue = "jdbc") public class JdbcTranslator extends AbstractTranslator { - @Value("${task.persistent.username}") private String mysqlUsername; @Value("${task.persistent.password}") @@ -63,21 +63,29 @@ public class JdbcTranslator extends AbstractTranslator { private String mysqlAddress; @Value("${task.persistent.tableName}") private String resultTableName; - + @Autowired + private RuleMetricDao ruleMetricDao; + @Autowired + private TaskDataSourceConfig taskDataSourceConfig; private static final String PROP_VARIABLE_NAME = "prop"; private static final String STATISTICS_VALUE_FIELD_NAME = "value"; private static final String STATISTICS_APPLICATION_ID_FIELD_NAME = "application_id"; private static final String STATISTICS_RULE_ID_FIELD_NAME = "rule_id"; + private static final 
String STATISTICS_RULE_METRIC_ID_FIELD_NAME = "rule_metric_id"; + private static final String STATISTICS_RUN_DATE_FIELD_NAME = "run_date"; private static final String STATISTICS_RESULT_FILED_TYPE = "result_type"; private static final String STATISTICS_CREATE_TIME = "create_time"; private static final String STATISTICS_VALUE_PLACEHOLDER = "${VALUE}"; private static final String STATISTICS_APPLICATION_ID_PLACEHOLDER = "${APPLICATION_ID}"; private static final String STATISTICS_RULE_ID_PLACEHOLDER = "${RULE_ID}"; + private static final String STATISTICS_RULE_METRIC_ID_PLACEHOLDER = "${RULE_METRIC_ID}"; + private static final String STATISTICS_RUN_DATE_PLACEHOLDER = "${RUN_DATE}"; private static final String STATISTICS_RESULT_TYPE_PLACEHOLDER = "${RESULT_TYPE}"; private static final String STATISTICS_CREATE_TIME_PLACEHOLDER = "${CREATE_TIME}"; private static final String DECLARE_PROP_SENTENCE = "val " + PROP_VARIABLE_NAME + " = new java.util.Properties;"; + private String usernamePropSentence; private String passwordPropSentence; private String statisticsAndSaveResultTemplate; @@ -90,12 +98,15 @@ public class JdbcTranslator extends AbstractTranslator { @PostConstruct public void init() { usernamePropSentence = PROP_VARIABLE_NAME + ".setProperty(\"user\", \"" + mysqlUsername + "\");"; - passwordPropSentence = PROP_VARIABLE_NAME + ".setProperty(\"password\", \"" + mysqlPassword + "\");"; + String password = taskDataSourceConfig.getPassword(); + passwordPropSentence = PROP_VARIABLE_NAME + ".setProperty(\"password\", \"" + password + "\");"; statisticsAndSaveResultTemplate = SqlTemplateConverter.VARIABLE_NAME_PLACEHOLDER + ".selectExpr(\"" + STATISTICS_VALUE_PLACEHOLDER + " as " + STATISTICS_VALUE_FIELD_NAME + "\", \"'" + STATISTICS_APPLICATION_ID_PLACEHOLDER + "' as " + STATISTICS_APPLICATION_ID_FIELD_NAME + "\", \"'" + STATISTICS_RESULT_TYPE_PLACEHOLDER + "' as " + STATISTICS_RESULT_FILED_TYPE + "\", \"'" + STATISTICS_RULE_ID_PLACEHOLDER + "' as " + STATISTICS_RULE_ID_FIELD_NAME + "\", \"'" + + STATISTICS_RULE_METRIC_ID_PLACEHOLDER + "' as " + STATISTICS_RULE_METRIC_ID_FIELD_NAME + "\", \"'" + + STATISTICS_RUN_DATE_PLACEHOLDER + "' as " + STATISTICS_RUN_DATE_FIELD_NAME + "\", \"'" + STATISTICS_CREATE_TIME_PLACEHOLDER + "' as " + STATISTICS_CREATE_TIME + "\").write.mode(org.apache.spark.sql.SaveMode.Append).jdbc(\"" + mysqlAddress + "\", \"" + resultTableName + "\", " + PROP_VARIABLE_NAME + ");"; @@ -104,57 +115,174 @@ public void init() { /** * Generate statistic statement and save mysql statement * @param ruleId + * @param ruleMetricMaps * @param templateStatisticsInputMetas * @param applicationId * @param ruleVariables * @param createTime * @param count + * @param runDate * @return * @throws RuleVariableNotSupportException * @throws RuleVariableNotFoundException */ @Override - public List persistenceTranslate(Long ruleId, Set templateStatisticsInputMetas, String applicationId, List ruleVariables, - String createTime, Integer count) throws RuleVariableNotSupportException, RuleVariableNotFoundException { + public List persistenceTranslate(Long ruleId, Map ruleMetricMaps, Set templateStatisticsInputMetas + , String applicationId, List ruleVariables, String createTime, Integer count, String runDate) throws RuleVariableNotSupportException, RuleVariableNotFoundException { List list = new ArrayList<>(); - list.addAll(getStatisticsAndSaveSentence(ruleId, templateStatisticsInputMetas, applicationId, ruleVariables, createTime, count)); + list.addAll(getStatisticsAndSaveSentence(ruleId, ruleMetricMaps, 
templateStatisticsInputMetas, applicationId, ruleVariables, createTime, count, runDate)); return list; } @Override public List getInitSentence() { - return Arrays.asList(getPropSentence(), getUsernamePropSentence(), getPasswordPropSentence()); + return Arrays.asList(getDriver(), getPropSentence(), getUsernamePropSentence(), getPasswordPropSentence()); + } + + private String getDriver() { + return "import java.sql.{Connection, DriverManager}"; } /** * Replace all place holder in sql, and generate save mysql statement * @param ruleId + * @param ruleMetricMap * @param templateStatisticsInputMetas * @param applicationId * @param ruleVariables * @param createTime * @param count + * @param runDate * @return * @throws RuleVariableNotSupportException * @throws RuleVariableNotFoundException */ - private List getStatisticsAndSaveSentence(Long ruleId, Set templateStatisticsInputMetas, String applicationId, List ruleVariables, - String createTime, Integer count) throws RuleVariableNotSupportException, RuleVariableNotFoundException { + private List getStatisticsAndSaveSentence(Long ruleId, Map ruleMetricMap, + Set templateStatisticsInputMetas, String applicationId, List ruleVariables, + String createTime, Integer count, String runDate) throws RuleVariableNotSupportException, RuleVariableNotFoundException { List list = new ArrayList<>(); + if (StringUtils.isBlank(runDate)) { + sentenceWithoutRunDate(templateStatisticsInputMetas, ruleVariables, list, applicationId, createTime, count, ruleId, ruleMetricMap); + } else { + sentenceWithRunDate(templateStatisticsInputMetas, ruleVariables, list, applicationId, createTime, count, ruleId, ruleMetricMap, runDate); + + } + + return list; + } + + private void sentenceWithRunDate(Set templateStatisticsInputMetas, List ruleVariables, + List list, String applicationId, String createTime, Integer count, Long ruleId, Map ruleMetricMap, String runDate) + throws RuleVariableNotSupportException, RuleVariableNotFoundException { + Date runRealDate = null; + try { + runRealDate = new SimpleDateFormat("yyyyMMdd").parse(runDate); + } catch (ParseException e) { + LOGGER.error(e.getMessage(), e); + throw new RuleVariableNotSupportException("{&FAILED_TO_PARSE_RUN_DATE}"); + } + list.add("var connection: Connection = null"); + list.add("classOf[com.mysql.jdbc.Driver]"); + list.add("try {"); + list.add("\tconnection = DriverManager.getConnection(\"" + mysqlAddress + "\", " + PROP_VARIABLE_NAME + ")"); for (TemplateStatisticsInputMeta s : templateStatisticsInputMetas) { String funcName = s.getFuncName(); String value = getValue(ruleVariables, s); String persistSentence = statisticsAndSaveResultTemplate - .replace(STATISTICS_VALUE_PLACEHOLDER, funcName + "(" + value + ")") - .replace(STATISTICS_APPLICATION_ID_PLACEHOLDER, applicationId) - .replace(STATISTICS_RESULT_TYPE_PLACEHOLDER, s.getResultType()) - .replace(STATISTICS_CREATE_TIME_PLACEHOLDER, createTime) - .replace(SqlTemplateConverter.VARIABLE_NAME_PLACEHOLDER, getVariable(count)) - .replace(STATISTICS_RULE_ID_PLACEHOLDER, ruleId + ""); + .replace(STATISTICS_VALUE_PLACEHOLDER, funcName + "(" + value + ")") + .replace(STATISTICS_APPLICATION_ID_PLACEHOLDER, applicationId) + .replace(STATISTICS_RESULT_TYPE_PLACEHOLDER, s.getResultType()) + .replace(STATISTICS_CREATE_TIME_PLACEHOLDER, createTime) + .replace(SqlTemplateConverter.VARIABLE_NAME_PLACEHOLDER, getVariable(count)) + .replace(STATISTICS_RULE_ID_PLACEHOLDER, ruleId + ""); + persistSentence = persistSentence.replace(STATISTICS_RUN_DATE_PLACEHOLDER, runRealDate.getTime() + 
""); + StringBuffer selectSql = new StringBuffer(); + StringBuffer deleteSql = new StringBuffer(); + String varName = s.getName().replace("{", "").replace("}", "").replace("&", ""); + if (ruleMetricMap.get(value) != null) { + persistSentence = persistSentence.replace(STATISTICS_RULE_METRIC_ID_PLACEHOLDER, ruleMetricMap.get(value) + ""); + selectSql.append("val selectSql").append("_").append(varName) + .append(" = \"(select * from ").append(resultTableName).append(" where rule_id = ").append(ruleId) + .append(" and rule_metric_id = ").append(ruleMetricMap.get(value)) + .append(" and (run_date = ").append(runRealDate.getTime()) + .append(")) qualitis_tmp_table\""); + deleteSql.append("val deleteSql").append("_").append(varName) + .append(" = \"delete from ").append(resultTableName).append(" where rule_id = ").append(ruleId) + .append(" and rule_metric_id = ").append(ruleMetricMap.get(value)) + .append(" and (run_date = ").append(runRealDate.getTime()) + .append(")\""); + } else { + if (CollectionUtils.isNotEmpty(ruleMetricMap.values())) { + persistSentence = persistSentence.replace(STATISTICS_RULE_METRIC_ID_PLACEHOLDER, ruleMetricMap.values().iterator().next() + ""); + selectSql.append("val selectSql").append("_").append(varName) + .append(" = \"(select * from ").append(resultTableName).append(" where rule_id = ").append(ruleId) + .append(" and rule_metric_id = ").append(ruleMetricMap.values().iterator().next()) + .append(" and (run_date = ").append(runRealDate.getTime()) + .append(")) qualitis_tmp_table\""); + deleteSql.append("val deleteSql").append("_").append(varName) + .append(" = \"delete from ").append(resultTableName).append(" where rule_id = ").append(ruleId) + .append(" and rule_metric_id = ").append(ruleMetricMap.values().iterator().next()) + .append(" and (run_date = ").append(runRealDate.getTime()) + .append(")\""); + } else { + persistSentence = persistSentence.replace(STATISTICS_RULE_METRIC_ID_PLACEHOLDER, "-1"); + selectSql.append("val selectSql").append("_").append(varName) + .append(" = \"(select * from ").append(resultTableName).append(" where rule_id = ").append(ruleId) + .append(" and rule_metric_id = ").append("-1") + .append(" and (run_date = ").append(runRealDate.getTime()) + .append(")) qualitis_tmp_table\""); + deleteSql.append("val deleteSql").append("_").append(varName) + .append(" = \"delete from ").append(resultTableName).append(" where rule_id = ").append(ruleId) + .append(" and rule_metric_id = ").append("-1") + .append(" and (run_date = ").append(runRealDate.getTime()) + .append(")\""); + } + } + list.add(selectSql.toString()); + // Judge the existence of task result with rule ID, rule metric ID, run date. + list.add("val resultDF" + "_" + varName + " = spark.read.jdbc(\"" + mysqlAddress + "\", selectSql" + "_" + varName + ", prop)"); + list.add("val lines" + "_" + varName + " = resultDF" + "_" + varName + ".count()"); + list.add("if (lines" + "_" + varName + " >= 1) {"); + // Delete the exist task result before insert. + list.add(deleteSql.toString()); + list.add("connection.createStatement().executeUpdate(deleteSql" + "_" + varName + ")"); + list.add("}"); + list.add(persistSentence); + LOGGER.info("Succeed to get persist sentence. 
sentence: {}", persistSentence); + } + list.add("} catch {"); + list.add("case e: Exception => println(\"JDBC operations failed because of \", e.getMessage())"); + list.add("} finally {"); + list.add("\tconnection.close()"); + list.add("}"); + } + + private void sentenceWithoutRunDate(Set templateStatisticsInputMetas, List ruleVariables, List list + , String applicationId, String createTime, Integer count, Long ruleId, Map ruleMetricMap) throws RuleVariableNotSupportException, RuleVariableNotFoundException { + for (TemplateStatisticsInputMeta s : templateStatisticsInputMetas) { + String funcName = s.getFuncName(); + String value = getValue(ruleVariables, s); + String persistSentence = statisticsAndSaveResultTemplate + .replace(STATISTICS_VALUE_PLACEHOLDER, funcName + "(" + value + ")") + .replace(STATISTICS_APPLICATION_ID_PLACEHOLDER, applicationId) + .replace(STATISTICS_RESULT_TYPE_PLACEHOLDER, s.getResultType()) + .replace(STATISTICS_CREATE_TIME_PLACEHOLDER, createTime) + .replace(SqlTemplateConverter.VARIABLE_NAME_PLACEHOLDER, getVariable(count)) + .replace(STATISTICS_RULE_ID_PLACEHOLDER, ruleId + ""); + + if (ruleMetricMap.get(value) != null) { + persistSentence = persistSentence.replace(STATISTICS_RULE_METRIC_ID_PLACEHOLDER, ruleMetricMap.get(value) + ""); + } else { + if (CollectionUtils.isNotEmpty(ruleMetricMap.values())) { + persistSentence = persistSentence.replace(STATISTICS_RULE_METRIC_ID_PLACEHOLDER, ruleMetricMap.values().iterator().next() + ""); + } else { + persistSentence = persistSentence.replace(STATISTICS_RULE_METRIC_ID_PLACEHOLDER, "-1"); + } + } + persistSentence = persistSentence.replace(STATISTICS_RUN_DATE_PLACEHOLDER, "-1"); list.add(persistSentence); LOGGER.info("Succeed to get persist sentence. sentence: {}", persistSentence); } - return list; } /** @@ -165,7 +293,8 @@ private List getStatisticsAndSaveSentence(Long ruleId, Set ruleVariables, TemplateStatisticsInputMeta templateStatisticsInputMeta) throws RuleVariableNotSupportException, RuleVariableNotFoundException { + private String getValue(List ruleVariables, TemplateStatisticsInputMeta templateStatisticsInputMeta) + throws RuleVariableNotSupportException, RuleVariableNotFoundException { if (templateStatisticsInputMeta.getValueType().equals(StatisticsValueTypeEnum.FIXED_VALUE.getCode())) { return templateStatisticsInputMeta.getValue(); } else { @@ -180,7 +309,7 @@ private String getValue(List ruleVariables, TemplateStatisticsInpu } } - throw new RuleVariableNotFoundException("Rule_variable of statistics_arg: [" + templateStatisticsInputMeta.getId() + "] does not exist"); + throw new RuleVariableNotFoundException("Rule_variable of statistics_arg: [" + templateStatisticsInputMeta.getId() + "] {&DOES_NOT_EXIST}"); } diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/util/DateExprReplaceUtil.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/util/DateExprReplaceUtil.java index f54e8c4c..1ad77997 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/util/DateExprReplaceUtil.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/util/DateExprReplaceUtil.java @@ -16,6 +16,10 @@ package com.webank.wedatasphere.qualitis.util; +import com.webank.wedatasphere.qualitis.constant.SpecCharEnum; +import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; +import java.util.HashMap; +import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,6 +44,14 @@ private DateExprReplaceUtil() { private static 
final Pattern MINUS_PATTERN = Pattern.compile("\\s*-\\s*[0-9]+"); private static final Pattern DIGITAL_PATTERN = Pattern.compile("[0-9]+"); + private static final Pattern CUSTOM_PLACEHOLDER_PATTERN = Pattern.compile("\\$\\{[^ ]*}"); + private static final Map RUN_DATE_FORMAT = new HashMap(2){{ + put("run_date","yyyyMMdd"); + put("run_date_std", "yyyy-MM-dd"); + }}; + + + private static final Logger LOGGER = LoggerFactory.getLogger(DateExprReplaceUtil.class); /** @@ -92,4 +104,61 @@ private static Integer getTime(String replaceStr) { } } + public static String replaceRunDate(Date date, String midTableAction) throws UnExpectedRequestException { + Matcher matcher = CUSTOM_PLACEHOLDER_PATTERN.matcher(midTableAction); + while (matcher.find()) { + String replaceStr = matcher.group(); + boolean legalSystemParams = replaceStr.contains("run_date") || replaceStr.contains("run_date_std"); + if (! legalSystemParams) { + throw new UnExpectedRequestException("Custom placeholder must be a system variable (run_date or run_date_std)."); + } + String currentParam = replaceStr.substring(2, replaceStr.length() - 1); + String dateStr = ""; + Calendar calendar = Calendar.getInstance(); + if (currentParam.contains(SpecCharEnum.MINUS.getValue())) { + String[] keys = currentParam.split(SpecCharEnum.MINUS.getValue()); + int forwayDay = Integer.parseInt(keys[1]); + calendar.setTime(date); + calendar.add(Calendar.DATE, 0 - forwayDay); + dateStr = new SimpleDateFormat(RUN_DATE_FORMAT.get(keys[0])).format(calendar.getTime()); + } else { + calendar.setTime(date); + calendar.add(Calendar.DATE, -1); + dateStr = new SimpleDateFormat(RUN_DATE_FORMAT.get(currentParam)).format(calendar.getTime()); + } + + midTableAction = midTableAction.replace(replaceStr, dateStr); + } + + return midTableAction; + } + + public static String replaceFilter(Date date, String filter) throws UnExpectedRequestException { + Matcher matcher = CUSTOM_PLACEHOLDER_PATTERN.matcher(filter); + while (matcher.find()) { + String replaceStr = matcher.group(); + boolean legalSystemParams = replaceStr.contains("run_date") || replaceStr.contains("run_date_std"); + if (!
legalSystemParams) { + throw new UnExpectedRequestException("Custom placeholder must be a system variable (run_date or run_date_std)."); + } + String currentParam = replaceStr.substring(2, replaceStr.length() - 1); + String dateStr = ""; + Calendar calendar = Calendar.getInstance(); + if (currentParam.contains(SpecCharEnum.MINUS.getValue())) { + String[] keys = currentParam.split(SpecCharEnum.MINUS.getValue()); + int forwayDay = Integer.parseInt(keys[1]); + calendar.setTime(date); + calendar.add(Calendar.DATE, 0 - forwayDay - 1); + dateStr = new SimpleDateFormat(RUN_DATE_FORMAT.get(keys[0])).format(calendar.getTime()); + } else { + calendar.setTime(date); + calendar.add(Calendar.DATE, -1); + dateStr = new SimpleDateFormat(RUN_DATE_FORMAT.get(currentParam)).format(calendar.getTime()); + } + + filter = filter.replace(replaceStr, dateStr); + } + + return filter; + } } diff --git a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityTask.java b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityTask.java index b54fa18a..18ad326b 100644 --- a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityTask.java +++ b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityTask.java @@ -30,13 +30,14 @@ * @author howeye */ public class DataQualityTask { - private String applicationId; private Integer taskType; private List ruleTaskDetails; private String createTime; private String partition; private Long taskId; + private String user; + private String startupParam; public DataQualityTask() { } @@ -69,6 +70,14 @@ public DataQualityTask(String applicationId, String createTime, String partition this.ruleTaskDetails = ruleTaskDetails; } + public String getUser() { + return user; + } + + public void setUser(String user) { + this.user = user; + } + public Long getTaskId() { return taskId; } @@ -117,15 +126,24 @@ public void setRuleTaskDetails(List ruleTaskDetails) { this.ruleTaskDetails = ruleTaskDetails; } + public String getStartupParam() { + return startupParam; + } + + public void setStartupParam(String startupParam) { + this.startupParam = startupParam; + } + @Override public String toString() { return "DataQualityTask{" + - "applicationId='" + applicationId + '\'' + - ", taskType=" + taskType + - ", ruleTaskDetails=" + ruleTaskDetails + - ", createTime='" + createTime + '\'' + - ", partition='" + partition + '\'' + - ", taskId='" + taskId + '\'' + - '}'; + "applicationId='" + applicationId + '\'' + + ", taskType=" + taskType + + ", ruleTaskDetails=" + ruleTaskDetails + + ", createTime='" + createTime + '\'' + + ", partition='" + partition + '\'' + + ", taskId=" + taskId + + ", user='" + user + '\'' + + '}'; } } diff --git a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/bean/RuleTaskDetail.java b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/bean/RuleTaskDetail.java index 066a4044..4476c80a 100644 --- a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/bean/RuleTaskDetail.java +++ b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/bean/RuleTaskDetail.java @@ -22,7 +22,6 @@ * @author howeye */ public class RuleTaskDetail { - private Rule rule; private String midTableName; diff --git a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/constant/TaskTypeEnum.java b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/constant/TaskTypeEnum.java index 973fc8bb..a2648f01 100644 --- a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/constant/TaskTypeEnum.java +++
b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/constant/TaskTypeEnum.java @@ -27,8 +27,7 @@ public enum TaskTypeEnum { JAVA_TASK(2, "JAVA TASK"), SPARK_TASK(3, "SPARK TASK"), PYTHON_TASK(4, "PYTHON TASK"), - MIX_TASK(5, "MIX TASK"), - ; + MIX_TASK(5, "MIX TASK"); private Integer code; private String message; diff --git a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/AbstractTaskDivider.java b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/AbstractTaskDivider.java index 6df675be..6973e4da 100644 --- a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/AbstractTaskDivider.java +++ b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/AbstractTaskDivider.java @@ -18,9 +18,9 @@ import com.webank.wedatasphere.qualitis.bean.DataQualityTask; import com.webank.wedatasphere.qualitis.exception.ArgumentException; +import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; +import com.webank.wedatasphere.qualitis.metadata.exception.MetaDataAcquireFailedException; import com.webank.wedatasphere.qualitis.rule.entity.Rule; -import com.webank.wedatasphere.qualitis.bean.DataQualityTask; -import com.webank.wedatasphere.qualitis.exception.ArgumentException; import java.util.Date; import java.util.List; @@ -29,20 +29,21 @@ * @author howeye */ public abstract class AbstractTaskDivider { - - /** - * Divided rules into multi-task + * Divided rules into multi-task. * @param rules * @param applicationId * @param createTime * @param partition * @param date * @param database + * @param user * @param threshold * @return * @throws ArgumentException + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException */ - public abstract List divide(List rules, String applicationId, String createTime, String partition, Date date, String database, Integer threshold) throws ArgumentException; - + public abstract List divide(List rules, String applicationId, String createTime, String partition, Date date, + String database, String user, Integer threshold) throws ArgumentException, UnExpectedRequestException, MetaDataAcquireFailedException; } \ No newline at end of file diff --git a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/SameDataSourceTaskDivider.java b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/SameDataSourceTaskDivider.java index 70bb6cc6..81fee12e 100644 --- a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/SameDataSourceTaskDivider.java +++ b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/SameDataSourceTaskDivider.java @@ -18,74 +18,78 @@ import com.webank.wedatasphere.qualitis.bean.DataQualityTask; import com.webank.wedatasphere.qualitis.bean.RuleTaskDetail; +import com.webank.wedatasphere.qualitis.dao.UserDao; import com.webank.wedatasphere.qualitis.exception.ArgumentException; -import com.webank.wedatasphere.qualitis.rule.constant.TemplateInputTypeEnum; import com.webank.wedatasphere.qualitis.rule.entity.Rule; import com.webank.wedatasphere.qualitis.rule.entity.RuleDataSource; -import com.webank.wedatasphere.qualitis.rule.entity.RuleVariable; -import com.webank.wedatasphere.qualitis.bean.RuleTaskDetail; -import com.webank.wedatasphere.qualitis.exception.ArgumentException; -import com.webank.wedatasphere.qualitis.bean.DataQualityTask; -import com.webank.wedatasphere.qualitis.bean.RuleTaskDetail; -import com.webank.wedatasphere.qualitis.exception.ArgumentException; -import 
org.apache.commons.lang.RandomStringUtils; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.stream.Collectors; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.time.FastDateFormat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - -import java.security.SecureRandom; -import java.util.*; -import java.util.stream.Collectors; +import org.springframework.beans.factory.annotation.Autowired; /** * Divided rule into same task if they have the same datasource * @author howeye */ public class SameDataSourceTaskDivider extends AbstractTaskDivider { + @Autowired + private UserDao userDao; private static final FastDateFormat TASK_TIME_FORMAT = FastDateFormat.getInstance("yyyyMMddHHmmss"); private static final Logger LOGGER = LoggerFactory.getLogger(SameDataSourceTaskDivider.class); - private Random random = new Random(); - @Override - public List divide(List rules, String applicationId, String createTime, String partition, Date date, String database, Integer threshold) throws ArgumentException { - LOGGER.info("Start to divide rules into tasks, according to if datasource of the rule is the same or not"); - if (null == rules || rules.isEmpty()) { - throw new ArgumentException("Argument of rules can not be null or empty"); - } - + public List divide(List rules, String applicationId, String createTime, String partition, Date date, String database, String user + , Integer threshold) + throws ArgumentException { LOGGER.info("Start to classify rules by datasource"); Map> sameDataSourceRule = new HashMap<>(4); for (Rule rule : rules) { - String key = getKey(rule); - if (sameDataSourceRule.containsKey(key)) { + String key = getKey(rule, user); + // Rules without specific execution parameters can be split into the same task, and rules with execution parameters must be treated as a separate task. + Boolean specifyStaticStartupParam = (rule.getSpecifyStaticStartupParam() != null && rule.getSpecifyStaticStartupParam()); + if (sameDataSourceRule.containsKey(key) && ! specifyStaticStartupParam) { sameDataSourceRule.get(key).add(rule); + } else if (specifyStaticStartupParam) { + List tmp = new ArrayList<>(); + tmp.add(rule); + sameDataSourceRule.put(UUID.randomUUID().toString().replace("-", "") + "." + key, tmp); } else { List tmp = new ArrayList<>(); tmp.add(rule); sameDataSourceRule.put(key, tmp); } } - LOGGER.info("Succeed to classify rules by datasource. result: {}", sameDataSourceRule); - + LOGGER.info("Succeed to classify rules by datasource. Result: {}", sameDataSourceRule); List result = new ArrayList<>(); for (String key : sameDataSourceRule.keySet()) { List ruleList = sameDataSourceRule.get(key); + String ruleStartup = ruleList.stream().map(Rule::getStaticStartupParam) + .filter(staticStartupParam -> StringUtils.isNotBlank(staticStartupParam)) + .collect(Collectors.joining()); List ruleIdList = ruleList.stream().map(Rule::getId).collect(Collectors.toList()); LOGGER.info("Start to divide rules: {} into a task.", ruleIdList); + LOGGER.info("Start to divide rules. 
Key: {}", key); + String[] keys = key.split("\\."); + String proxyUser = keys[keys.length - 1]; List ruleTaskDetails = new ArrayList<>(); + if (StringUtils.isNotBlank(proxyUser) && database.contains("_ind")) { + database = proxyUser.concat("_ind"); + } for (Rule rule : ruleList) { - String nonce = RandomStringUtils.randomNumeric(6); - String tableName = generateTable(date, rule, nonce); + String tableName = generateTable(rule); String midTableName = database + "." + tableName; - if (rule.getChildRule() != null) { - nonce = RandomStringUtils.randomNumeric(6); - tableName = generateTable(date, rule, nonce); - midTableName += "," + database + "." + tableName; - } if (ruleTaskDetails.size() < threshold) { ruleTaskDetails.add(new RuleTaskDetail(rule, midTableName)); @@ -93,29 +97,57 @@ public List divide(List rules, String applicationId, Stri List ruleTaskDetailCopy = new ArrayList<>(); ruleTaskDetailCopy.addAll(ruleTaskDetails); DataQualityTask tmp = new DataQualityTask(applicationId, createTime, partition, ruleTaskDetailCopy); + if (StringUtils.isNotBlank(ruleStartup)) { + tmp.setStartupParam(ruleStartup); + } + if (StringUtils.isNotBlank(proxyUser)) { + LOGGER.info("Start to divide rules. Proxy user: {}", proxyUser); + tmp.setUser(proxyUser); + } result.add(tmp); ruleTaskDetails = new ArrayList<>(); } } if (ruleTaskDetails.size() > 0) { DataQualityTask tmp = new DataQualityTask(applicationId, createTime, partition, ruleTaskDetails); + if (StringUtils.isNotBlank(ruleStartup)) { + tmp.setStartupParam(ruleStartup); + } + if (StringUtils.isNotBlank(proxyUser)) { + tmp.setUser(proxyUser); + } result.add(tmp); LOGGER.info("Succeed to divide rules: {} into a task {}", ruleIdList, tmp); } } LOGGER.info("Succeed to divide all rules into tasks. result: {}", result); - return result; } - private String generateTable(Date date, Rule rule, String nonce) { - return "mid_application_" + rule.getId() + "_" + TASK_TIME_FORMAT.format(date) + "_" + nonce; + private String generateTable(Rule rule) { + StringBuffer name = new StringBuffer(); + name.append(rule.getProject().getName()).append("_") + .append(rule.getName()); + + return name.toString(); } - private String getKey(Rule rule) throws ArgumentException { + private String getKey(Rule rule, String user) throws ArgumentException { if (rule.getRuleDataSources().size() != 0) { - RuleDataSource ruleDataSource = rule.getRuleDataSources().iterator().next(); - return ruleDataSource.getClusterName() + "." + ruleDataSource.getDbName(); + List ruleDataSourceList = rule.getRuleDataSources().stream().filter(dataSource -> StringUtils.isNotBlank(dataSource.getDbName())).collect( + Collectors.toList()); + RuleDataSource ruleDataSource; + if (CollectionUtils.isNotEmpty(ruleDataSourceList)) { + ruleDataSource = ruleDataSourceList.iterator().next(); + } else { + ruleDataSource = rule.getRuleDataSources().iterator().next(); + } + String proxyUser = ruleDataSource.getProxyUser(); + if (StringUtils.isNotBlank(proxyUser)) { + return ruleDataSource.getClusterName() + "." + ruleDataSource.getDbName() + "." + proxyUser; + } + return ruleDataSource.getClusterName() + "." + ruleDataSource.getDbName() + "." + user; + } throw new ArgumentException("Error! 
Rule variables miss data"); diff --git a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/exception/ArgumentException.java b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/exception/ArgumentException.java index 3dae188f..4f11c291 100644 --- a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/exception/ArgumentException.java +++ b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/exception/ArgumentException.java @@ -20,7 +20,6 @@ * @author howeye */ public class ArgumentException extends Exception { - public ArgumentException(String message) { super(message); } diff --git a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/MetaDataClientImpl.java b/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/MetaDataClientImpl.java index eb5f2d38..fdc198b5 100644 --- a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/MetaDataClientImpl.java +++ b/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/MetaDataClientImpl.java @@ -17,26 +17,39 @@ package com.webank.wedatasphere.qualitis.client.impl; import com.webank.wedatasphere.qualitis.config.LinkisConfig; +import com.webank.wedatasphere.qualitis.constant.LinkisResponseKeyEnum; +import com.webank.wedatasphere.qualitis.constant.SpecCharEnum; import com.webank.wedatasphere.qualitis.dao.ClusterInfoDao; import com.webank.wedatasphere.qualitis.entity.ClusterInfo; import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; import com.webank.wedatasphere.qualitis.metadata.client.MetaDataClient; -import com.webank.wedatasphere.qualitis.metadata.exception.MetaDataAcquireFailedException; import com.webank.wedatasphere.qualitis.metadata.request.GetClusterByUserRequest; import com.webank.wedatasphere.qualitis.metadata.request.GetColumnByUserAndTableRequest; +import com.webank.wedatasphere.qualitis.metadata.exception.MetaDataAcquireFailedException; import com.webank.wedatasphere.qualitis.metadata.request.GetDbByUserAndClusterRequest; import com.webank.wedatasphere.qualitis.metadata.request.GetTableByUserAndDbRequest; +import com.webank.wedatasphere.qualitis.metadata.request.GetUserTableByCsIdRequest; +import com.webank.wedatasphere.qualitis.metadata.request.GetUserColumnByCsRequest; import com.webank.wedatasphere.qualitis.metadata.response.DataInfo; import com.webank.wedatasphere.qualitis.metadata.response.cluster.ClusterInfoDetail; import com.webank.wedatasphere.qualitis.metadata.response.column.ColumnInfoDetail; import com.webank.wedatasphere.qualitis.metadata.response.db.DbInfoDetail; +import com.webank.wedatasphere.qualitis.metadata.response.table.CsTableInfoDetail; +import com.webank.wedatasphere.qualitis.metadata.response.table.PartitionStatisticsInfo; import com.webank.wedatasphere.qualitis.metadata.response.table.TableInfoDetail; +import com.webank.wedatasphere.qualitis.metadata.response.table.TableStatisticsInfo; +import com.webank.wedatasphere.qualitis.response.GeneralResponse; +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import javax.ws.rs.core.UriBuilder; import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.json.JSONException; +import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -45,6 +58,8 @@ import 
org.springframework.http.HttpMethod; import org.springframework.http.MediaType; import org.springframework.stereotype.Component; +import org.springframework.web.client.ResourceAccessException; +import org.springframework.web.client.RestClientException; import org.springframework.web.client.RestTemplate; /** @@ -53,9 +68,12 @@ */ @Component public class MetaDataClientImpl implements MetaDataClient { - private final static String QUERY_CS_TABLE_PATH = "/dss/cs/tables"; private final static String QUERY_CS_COLUMN_PATH = "/dss/cs/columns"; + private final static String QUERY_WORKFLOW_TABLE_PATH = "/dss/workflow/tables"; + private final static String QUERY_WORKFLOW_COLUMN_PATH = "/dss/workflow/columns"; + + private static final String LINKIS_ONE_VERSION = "1.0"; @Autowired private ClusterInfoDao clusterInfoDao; @@ -106,9 +124,10 @@ public DataInfo getDbByUserAndCluster(GetDbByUserAndClusterRequest Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); LOGGER.info("Start to get db by user and cluster by linkis. response: {}", response); - if (!checkResponse(response)) { + if (! checkResponse(response)) { String message = (String) response.get("message"); - throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, message: " + message); } List allDbs = ((List>) ((Map) response.get("data")).get("dbs")).stream() @@ -128,7 +147,6 @@ public DataInfo getDbByUserAndCluster(GetDbByUserAndClusterRequest return dataInfo; } - @Override public DataInfo getTableByUserAndDb(GetTableByUserAndDbRequest request) throws UnExpectedRequestException, MetaDataAcquireFailedException { @@ -145,13 +163,13 @@ public DataInfo getTableByUserAndDb(GetTableByUserAndDbRequest headers.add("Token-Code", clusterInfo.getLinkisToken()); HttpEntity entity = new HttpEntity<>(headers); - LOGGER - .info("Start to get table by user and cluster and db by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); + LOGGER.info("Start to get table by user and cluster and db by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); - LOGGER.info("Start to get table by user and cluster and db by linkis. response: {}", response); + LOGGER.info("Finished to get table by user and cluster and db by linkis. response: {}", response); - if (!checkResponse(response)) { + if (! checkResponse(response)) { String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); } @@ -172,35 +190,6 @@ public DataInfo getTableByUserAndDb(GetTableByUserAndDbRequest return dataInfo; } - @Override - public String getTableComment(String clusterName, String dbName, String tableName, String userName) - throws MetaDataAcquireFailedException, UnExpectedRequestException { - ClusterInfo clusterInfo = checkClusterNameExists(clusterName); - // send request to get table comment. 
- String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getTableComment()) - .queryParam("database", dbName).queryParam("tableName", tableName).toString(); - - HttpHeaders headers = new HttpHeaders(); - headers.setContentType(MediaType.APPLICATION_JSON); - headers.add("Token-User", userName); - headers.add("Token-Code", clusterInfo.getLinkisToken()); - - HttpEntity entity = new HttpEntity<>(headers); - LOGGER.info("Start to get table comment by user and cluster and db by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, - entity); - Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); - LOGGER.info("Finished to get table comment by user and cluster and db by linkis. response: {}", response); - - if (!checkResponse(response)) { - String message = (String) response.get("message"); - throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); - } - Object result = ((Map) ((Map) ((Map) response.get("data")).get("tableBaseInfo")).get("base")) - .get("comment"); - String comment = result == null ? "no comment" : result.toString(); - return comment; - } - @Override public DataInfo getColumnByUserAndTable(GetColumnByUserAndTableRequest request) throws UnExpectedRequestException, MetaDataAcquireFailedException { @@ -220,10 +209,11 @@ public DataInfo getColumnByUserAndTable(GetColumnByUserAndTabl LOGGER.info("Start to get column by user and cluster and db and table by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); - LOGGER.info("Start to get table by user and cluster and and table by linkis. response: {}", response); + LOGGER.info("Finished to get table by user and cluster and and table by linkis. response: {}", response); - if (!checkResponse(response)) { + if (! checkResponse(response)) { String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); } @@ -243,8 +233,101 @@ public DataInfo getColumnByUserAndTable(GetColumnByUserAndTabl } @Override - public List getColumnInfo(String clusterName, String dbName, String tableName, String userName) + public String getTableBasicInfo(String clusterName, String dbName, String tableName, String userName) + throws MetaDataAcquireFailedException, UnExpectedRequestException { + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get table comment. + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getTableInfo()) + .queryParam("database", dbName).queryParam("tableName", tableName).toString(); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", userName); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(headers); + LOGGER.info("Start to get table comment by user and cluster and db by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, + entity); + Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); + LOGGER.info("Finished to get table comment by user and cluster and db by linkis. response: {}", response); + + if (! 
checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); + } + Object result = ((Map) ((Map) ((Map) response.get("data")).get("tableBaseInfo")).get("base")) + .get("comment"); + String comment = result == null ? "no comment" : result.toString(); + return comment; + } + + + @Override + public DataInfo getTableByCsId(GetUserTableByCsIdRequest request) throws MetaDataAcquireFailedException, UnExpectedRequestException { + DataInfo result = new DataInfo<>(); + List csTableInfoDetailList = new ArrayList<>(); + try { + LOGGER.info("Start to get tables with context service ID and node name by restful API. csId: {}, nodeName: {}", request.getCsId(), request.getNodeName()); + ClusterInfo clusterInfo = checkClusterNameExists(request.getClusterName()); + String authUser = request.getLoginUser(); + + // send request + String url; + if (clusterInfo.getClusterType().endsWith(LINKIS_ONE_VERSION)) { + url = getPath(clusterInfo.getLinkisAddress()).path(QUERY_WORKFLOW_TABLE_PATH).toString(); + } else { + url = getPath(clusterInfo.getLinkisAddress()).path(QUERY_CS_TABLE_PATH).toString(); + } + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + JSONObject jsonObject = new JSONObject(); + try { + jsonObject.put("contextID", request.getCsId()); + jsonObject.put("nodeName", request.getNodeName()); + } catch (JSONException e) { + LOGGER.error(e.getMessage(), e); + throw new UnExpectedRequestException("Failed to construct http body json with context ID and node name", 500); + } + + HttpEntity entity = new HttpEntity<>(jsonObject.toString(), headers); + LOGGER.info("Start to get table with context service ID and node name by restful API. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.POST, entity); + Map response = restTemplate.exchange(url, HttpMethod.POST, entity, Map.class).getBody(); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); + } + LOGGER.info("Finished to get table with context service ID and node name by restful API. response: {}", response); + Map data = (Map) response.get("data"); + List> tables = (List>) data.get("tables"); + if (tables == null || tables.size() == 0) { + return result; + } + LOGGER.info("Successfully to get tables with context service ID and node name by restful API. csId: {}, nodeName: {}, tables: {}", + request.getCsId(), request.getNodeName(), tables); + for (Map table : tables) { + CsTableInfoDetail csTableInfoDetail = new CsTableInfoDetail(); + csTableInfoDetail.setTableName(table.get("tableName").toString()); + csTableInfoDetail.setContextKey(table.get("contextKey").toString()); + csTableInfoDetailList.add(csTableInfoDetail); + } + result.setContent(csTableInfoDetailList); + result.setTotalCount(tables.size()); + } catch (RestClientException e) { + LOGGER.error(e.getMessage(), e); + throw new MetaDataAcquireFailedException("Error! 
Can not get tables by context service ID and node name", 500); + } + return result; + } + + @Override + public List getColumnInfo(String clusterName, String dbName, String tableName, String userName) throws MetaDataAcquireFailedException, UnExpectedRequestException { ClusterInfo clusterInfo = checkClusterNameExists(clusterName); // send request to get table comment. String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getColumnInfo()) @@ -255,13 +338,13 @@ public List getColumnInfo(String clusterName, String dbName, S headers.add("Token-User", userName); headers.add("Token-Code", clusterInfo.getLinkisToken()); HttpEntity entity = new HttpEntity<>(headers); - LOGGER.info("Start to get column info by user and cluster and db and table by linkis. url: {}, method: {}, body: {}", url, - javax.ws.rs.HttpMethod.GET, entity); + LOGGER.info("Start to get column info by user and cluster and db and table by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); LOGGER.info("Finished to get column info by user and cluster and db and table by linkis. response: {}", response); - if (!checkResponse(response)) { + if (! checkResponse(response)) { String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); } @@ -283,8 +366,699 @@ public List getColumnInfo(String clusterName, String dbName, S return result; } - private ClusterInfo checkClusterNameExists(String clusterName) throws - UnExpectedRequestException { + @Override + public DataInfo getColumnByCsId(GetUserColumnByCsRequest request) + throws MetaDataAcquireFailedException, UnExpectedRequestException { + DataInfo result = new DataInfo<>(); + List list = new ArrayList<>(); + + try { + LOGGER.info("Start to get columns with context service ID and table's context key. csId: {}, contextKey: {}", request.getCsId(), + request.getContextKey()); + ClusterInfo clusterInfo = checkClusterNameExists(request.getClusterName()); + String authUser = request.getLoginUser(); + // send request + String url; + if (clusterInfo.getClusterType().endsWith(LINKIS_ONE_VERSION)) { + url = getPath(clusterInfo.getLinkisAddress()).path(QUERY_WORKFLOW_COLUMN_PATH).toString(); + } else { + url = getPath(clusterInfo.getLinkisAddress()).path(QUERY_CS_COLUMN_PATH).toString(); + } + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + JSONObject jsonObject = new JSONObject(); + try { + jsonObject.put("contextID", request.getCsId()); + jsonObject.put("contextKey", request.getContextKey()); + } catch (JSONException e) { + LOGGER.error("Failed to construct http body json, exception is : {}", e); + } + + HttpEntity entity = new HttpEntity<>(jsonObject.toString(), headers); + LOGGER.info("Start to get column with context service ID and table's context key by restful API. url: {}, method: {}, body: {}", url, + javax.ws.rs.HttpMethod.POST, entity); + Map response = restTemplate.exchange(url, HttpMethod.POST, entity, Map.class).getBody(); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! 
Can not get meta data from linkis, message: " + message);
+                throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message);
+            }
+            LOGGER.info("Finished to get column with context service ID and table's context key by restful API. response: {}", response);
+            Map data = (Map) response.get("data");
+            List<Map<String, Object>> columns = (List<Map<String, Object>>) data.get("columns");
+            if (columns == null || columns.size() == 0) {
+                return result;
+            }
+            LOGGER.info("Successfully to get columns with context service ID and table's context key by restful API. csId: {}, contextKey: {}",
+                request.getCsId(), request.getContextKey());
+            for (Map column : columns) {
+                ColumnInfoDetail columnInfoDetail = new ColumnInfoDetail();
+                columnInfoDetail.setFieldName(column.get("columnName").toString());
+                columnInfoDetail.setDataType(column.get("columnType").toString());
+                columnInfoDetail.setColumnComment(column.get("columnComment") == null ? "" : column.get("columnComment").toString());
+                columnInfoDetail.setPartitionField((Boolean) column.get("partitioned"));
+                list.add(columnInfoDetail);
+            }
+            result.setTotalCount(columns.size());
+            result.setContent(list);
+        } catch (RestClientException e) {
+            LOGGER.error(e.getMessage(), e);
+            throw new MetaDataAcquireFailedException("Error! Can not get column by context service ID", 500);
+        }
+        return result;
+    }
+
+    @Override
+    public TableStatisticsInfo getTableStatisticsInfo(String clusterName, String dbName, String tableName, String userName)
+        throws UnExpectedRequestException, MetaDataAcquireFailedException, RestClientException {
+        ClusterInfo clusterInfo = checkClusterNameExists(clusterName);
+
+        // Send request to get table statistics info
+        String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getTableStatistics())
+            .queryParam("database", dbName)
+            .queryParam("tableName", tableName).toString();
+
+        HttpHeaders headers = new HttpHeaders();
+        headers.setContentType(MediaType.APPLICATION_JSON);
+        headers.add("Token-User", userName);
+        headers.add("Token-Code", clusterInfo.getLinkisToken());
+
+        HttpEntity entity = new HttpEntity<>(headers);
+        LOGGER.info("Start to get table info by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity);
+        Map response;
+        try {
+            response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody();
+        } catch (ResourceAccessException e) {
+            LOGGER.error(e.getMessage(), e);
+            throw new MetaDataAcquireFailedException("Error! Can not get table info from linkis, exception: " + e.getMessage(), 500);
+        }
+        LOGGER.info("Finish to get table info by linkis. response: {}", response);
+
+        if (! checkResponse(response)) {
+            String message = (String) response.get("message");
+            LOGGER.error("Error! Can not get meta data from linkis, message: " + message);
+            throw new MetaDataAcquireFailedException("Error! Can not get table info from linkis, exception: " + message);
+        }
+        Map result = (Map) ((Map) response.get("data")).get("tableStatisticInfo");
+        TableStatisticsInfo tableStatisticsInfo = new TableStatisticsInfo();
+        tableStatisticsInfo.setTableFileCount(Integer.parseInt(result.get("fileNum").toString()));
+        tableStatisticsInfo.setTableSize(result.get("tableSize").toString());
+        tableStatisticsInfo.setPartitions((List) result.get("partitions"));
+
+        return tableStatisticsInfo;
+    }
+
+    @Override
+    public PartitionStatisticsInfo getPartitionStatisticsInfo(String clusterName, String dbName, String tableName, String partitionPath, String userName)
+        throws UnExpectedRequestException, MetaDataAcquireFailedException, RestClientException {
+        ClusterInfo clusterInfo = checkClusterNameExists(clusterName);
+
+        // Send request to get partition statistics info
+        String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getPartitionStatistics())
+            .queryParam("database", dbName)
+            .queryParam("tableName", tableName)
+            .queryParam("partitionPath", partitionPath).toString();
+        try {
+            url = URLDecoder.decode(url, "UTF-8");
+        } catch (UnsupportedEncodingException e) {
+            LOGGER.error(e.getMessage(), e);
+            throw new UnExpectedRequestException("Decode get partition statistic info exception", 500);
+        }
+
+        HttpHeaders headers = new HttpHeaders();
+        headers.setContentType(MediaType.APPLICATION_JSON);
+        headers.add("Token-User", userName);
+        headers.add("Token-Code", clusterInfo.getLinkisToken());
+
+        HttpEntity entity = new HttpEntity<>(headers);
+        LOGGER.info("Start to get partition info by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity);
+        Map response = null;
+        try {
+            response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody();
+        } catch (ResourceAccessException e) {
+            LOGGER.error(e.getMessage(), e);
+            throw new MetaDataAcquireFailedException("Error! Can not get partition info from linkis, exception: " + e.getMessage(), 500);
+        }
+        LOGGER.info("Finish to get partition info by linkis. response: {}", response);
+
+        if (! checkResponse(response)) {
+            String message = (String) response.get("message");
+            LOGGER.error("Error! Can not get meta data from linkis, message: " + message);
+            throw new MetaDataAcquireFailedException("Error! Can not get partition info from linkis, exception: " + message);
+        }
+        Map result = (Map) ((Map) response.get("data")).get("partitionStatisticInfo");
+        PartitionStatisticsInfo partitionStatisticsInfo = new PartitionStatisticsInfo();
+        partitionStatisticsInfo.setPartitionChildCount(Integer.parseInt(result.get("fileNum").toString()));
+        partitionStatisticsInfo.setPartitionSize(result.get("partitionSize").toString());
+        partitionStatisticsInfo.setPartitions((List) result.get("childrens"));
+        return partitionStatisticsInfo;
+    }
+
+    @Override
+    public boolean fieldExist(String col, List cols, Map mappingCols) {
+        // single or custom
+        if (StringUtils.isNotBlank(col)) {
+            // single field
+            if (cols == null) {
+                return false;
+            }
+            if (col.equals(SpecCharEnum.STAR.getValue())) {
+                return cols != null && cols.size() > 0;
+            }
+            String[] colsInfo = col.split("\\|");
+            // "diff" counts the requested columns that have not been matched yet; zero means every requested field exists with the expected type.
+            int diff = colsInfo.length;
+            for (String column : colsInfo) {
+                for (ColumnInfoDetail columnInfoDetail : cols) {
+                    String realNameWithType = columnInfoDetail.getFieldName() + ":" + columnInfoDetail.getDataType();
+                    if (realNameWithType.equals(column)) {
+                        diff--;
+                        break;
+                    }
+                }
+            }
+            return diff == 0;
+        } else {
+            // table level check or multi
+            if (mappingCols != null && mappingCols.size() > 0) {
+                int diff = mappingCols.size();
+                for (String colName : mappingCols.keySet()) {
+                    for (ColumnInfoDetail columnInfoDetail : cols) {
+                        if (columnInfoDetail.getFieldName().equals(colName) && columnInfoDetail.getDataType().equals(mappingCols.get(colName))) {
+                            diff--;
+                            break;
+                        }
+                    }
+                }
+                return diff == 0;
+            }
+            return CollectionUtils.isNotEmpty(cols);
+        }
+    }
+
+    @Override
+    public GeneralResponse getAllDataSourceTypes(String clusterName, String authUser) throws UnExpectedRequestException, MetaDataAcquireFailedException {
+        // Check existence of cluster name
+        ClusterInfo clusterInfo = checkClusterNameExists(clusterName);
+        // Send request to get data source types
+        String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceTypes()).toString();
+
+        HttpHeaders headers = new HttpHeaders();
+        headers.setContentType(MediaType.APPLICATION_JSON);
+        headers.add("Token-User", authUser);
+        headers.add("Token-Code", clusterInfo.getLinkisToken());
+
+        HttpEntity entity = new HttpEntity<>(headers);
+        LOGGER.info("Start to get data source types by user and cluster by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity);
+        Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody();
+        LOGGER.info("Finish to get data source types by user and cluster by linkis. response: {}", response);
+        if (! checkResponse(response)) {
+            String message = (String) response.get("message");
+            LOGGER.error("Error! Can not get meta data from linkis, message: " + message);
+            throw new MetaDataAcquireFailedException("Error! 
Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + List types = (List) data.get("type_list"); + + return new GeneralResponse<>("200", "Success to get all datasource types", data); + } + + @Override + public GeneralResponse getDataSourceEnv(String clusterName, String authUser) + throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceEnv()).toString(); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(headers); + LOGGER.info("Start to get data source env by user and cluster by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); + Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); + LOGGER.info("Finish to get data source env by user and cluster by linkis. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + List types = (List) data.get("query_list"); + return new GeneralResponse<>("200", "Success to get datasource env", data); + } + + @Override + public GeneralResponse getDataSourceInfoPage(String clusterName, String authUser, int page, int size, String searchName, + Long typeId) throws UnExpectedRequestException, MetaDataAcquireFailedException, UnsupportedEncodingException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + UriBuilder uriBuilder = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceInfo()) + .queryParam("currentPage", page).queryParam("pageSize", size); + if (StringUtils.isNotBlank(searchName)) { + uriBuilder.queryParam("name", searchName); + } + if (typeId != null) { + uriBuilder.queryParam("typeId", typeId); + } + + String url = uriBuilder.toString(); + url = URLDecoder.decode(url, "UTF-8"); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(headers); + LOGGER.info("Start to get data source info by user and cluster by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); + Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); + LOGGER.info("Finish to get data source info by user and cluster by linkis. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! 
Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + List types = (List) data.get("query_list"); + return new GeneralResponse<>("200", "Success to get datasource info", data); + } + + @Override + public GeneralResponse getDataSourceVersions(String clusterName, String authUser, Long dataSourceId) throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceVersions()).toString().replace("{DATA_SOURCE_ID}", dataSourceId.toString()); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(headers); + LOGGER.info("Start to get data source versions by user and cluster by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); + Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); + LOGGER.info("Finish to get data source versions by user and cluster by linkis. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + List types = (List) data.get("versions"); + return new GeneralResponse<>("200", "Success to get datasource version", data); + } + + @Override + public GeneralResponse getDataSourceInfoDetail(String clusterName, String authUser, Long dataSourceId, Long versionId) throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + UriBuilder uriBuilder = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceInfo()).path(dataSourceId.toString()); + if (versionId != null) { + uriBuilder.path(versionId.toString()); + } + String url = uriBuilder.toString(); + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(headers); + LOGGER.info("Start to get data source info detail by user and cluster by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); + Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); + LOGGER.info("Finish to get data source info detail by user and cluster by linkis. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! 
Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + Map types = (Map) data.get("info"); + return new GeneralResponse<>("200", "Success to get datasource detail info", data); + } + + @Override + public GeneralResponse getDataSourceInfoDetailByName(String clusterName, String authUser, String dataSourceName) throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + UriBuilder uriBuilder = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceInfoName()).path(dataSourceName); + + String url = uriBuilder.toString(); + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(headers); + LOGGER.info("Start to get data source info detail by user and cluster and name by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); + Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); + LOGGER.info("Finish to get data source info detail by user and cluster and name by linkis. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + return new GeneralResponse<>("200", "Success to get datasource info detail by datasource name", data); + } + + @Override + public GeneralResponse getDataSourceKeyDefine(String clusterName, String authUser, Long keyId) throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceKeyDefine()).path(keyId.toString()).toString(); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(headers); + LOGGER.info("Start to get data source key define by user and cluster by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); + Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); + LOGGER.info("Finish to get data source key define by user and cluster by linkis. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! 
Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + return new GeneralResponse<>("200", "Success to get datasource key define", data); + } + + @Override + public GeneralResponse connectDataSource(String clusterName, String authUser, String jsonRequest) throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceConnect()).toString(); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(jsonRequest, headers); + LOGGER.info("Start to connect data source by user and cluster by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.POST, entity); + Map response = restTemplate.exchange(url, HttpMethod.POST, entity, Map.class).getBody(); + LOGGER.info("Finish to connect data source by user and cluster by linkis. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + return new GeneralResponse<>("200", "{&CONNECT_SUCCESS}", data); + } + + + @Override + public GeneralResponse getDataSourceConnectParams(String clusterName, String authUser, Long dataSourceId, Long versionId) throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + UriBuilder uriBuilder = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceConnectParam()); + if (versionId != null) { + uriBuilder.path(versionId.toString()); + } + String url = uriBuilder.toString().replace("{DATA_SOURCE_ID}", dataSourceId.toString()); + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(headers); + LOGGER.info("Start to get data source connect params by user and cluster by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); + Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); + LOGGER.info("Finish to get data source connect params by user and cluster by linkis. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! 
Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + + return new GeneralResponse<>("200", "Success to get datasource connect params", data); + } + + @Override + public GeneralResponse publishDataSource(String clusterName, String authUser, Long dataSourceId, Long versionId) throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourcePublish()).path(dataSourceId.toString()).path(versionId.toString()).toString(); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(headers); + LOGGER.info("Start to publish data source by user and cluster by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.POST, entity); + Map response = restTemplate.exchange(url, HttpMethod.POST, entity, Map.class).getBody(); + LOGGER.info("Finish to publish data source by user and cluster by linkis. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + return new GeneralResponse<>("200", "Success to publish datasource", data); + } + + @Override + public GeneralResponse expireDataSource(String clusterName, String authUser, Long dataSourceId) throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceExpire()).toString().replace("{DATA_SOURCE_ID}", dataSourceId.toString()); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(headers); + LOGGER.info("Start to expire data source by user and cluster by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.PUT, entity); + Map response = restTemplate.exchange(url, HttpMethod.PUT, entity, Map.class).getBody(); + LOGGER.info("Finish to expire data source by user and cluster by linkis. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! 
Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + return new GeneralResponse<>("200", "Success to expire datasource", data); + } + + @Override + public GeneralResponse modifyDataSource(String clusterName, String authUser, Long dataSourceId, String jsonRequest) + throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceModify()).toString().replace("{DATA_SOURCE_ID}", dataSourceId.toString()); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(jsonRequest, headers); + LOGGER.info("Start to modify data source by user and cluster by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.PUT, entity); + Map response = restTemplate.exchange(url, HttpMethod.PUT, entity, Map.class).getBody(); + LOGGER.info("Finish to modify data source by user and cluster by linkis. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + return new GeneralResponse<>("200", "Success to modify datasource", data); + } + + @Override + public GeneralResponse modifyDataSourceParam(String clusterName, String authUser, Long dataSourceId, String jsonRequest) + throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceInitVersion()).toString().replace("{DATA_SOURCE_ID}", dataSourceId.toString()); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(jsonRequest, headers); + LOGGER.info("Start to modify data source param by user and cluster by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.POST, entity); + Map response = restTemplate.exchange(url, HttpMethod.POST, entity, Map.class).getBody(); + LOGGER.info("Finish to modify data source param by user and cluster by linkis. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! 
Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + return new GeneralResponse<>("200", "Success to modify datasource connect params", data); + } + + @Override + public GeneralResponse createDataSource(String clusterName, String authUser, String jsonRequest) + throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceCreate()).toString(); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(jsonRequest, headers); + LOGGER.info("Start to create data source by user and cluster by linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.POST, entity); + Map response = restTemplate.exchange(url, HttpMethod.POST, entity, Map.class).getBody(); + LOGGER.info("Finish to create data source by user and cluster by linkis. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + return new GeneralResponse<>("200", "Success to create datasource", data); + } + + @Override + public Map getDbsByDataSource(String clusterName, String authUser, Long dataSourceId) throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceDb()).queryParam("system", "Qualitis").toString().replace("{DATA_SOURCE_ID}", dataSourceId.toString()); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(headers); + LOGGER.info("Start to get dbs by data source. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); + Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); + LOGGER.info("Finish to get dbs by data source. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! 
Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + List dbs = (List) data.get("dbs"); + if (CollectionUtils.isEmpty(dbs)) { + LOGGER.info("No dbs with data source to be choosed."); + } + return data; + } + + @Override + public Map getTablesByDataSource(String clusterName, String authUser, Long dataSourceId, String dbName) throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceTable()).queryParam("system", "Qualitis").toString() + .replace("{DATA_SOURCE_ID}", dataSourceId.toString()) + .replace("{DATA_SOURCE_DB}", dbName); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(headers); + LOGGER.info("Start to get tables by data source. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); + Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); + LOGGER.info("Finish to get tables by data source. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + List tables = (List) data.get("tables"); + if (CollectionUtils.isEmpty(tables)) { + LOGGER.info("No tables with data source to be choosed."); + } + return data; + } + + @Override + public DataInfo getColumnsByDataSource(String clusterName, String authUser, Long dataSourceId, String dbName, String tableName) throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getDatasourceColumn()).queryParam("system", "Qualitis").toString() + .replace("{DATA_SOURCE_ID}", dataSourceId.toString()) + .replace("{DATA_SOURCE_DB}", dbName) + .replace("{DATA_SOURCE_TABLE}", tableName); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Token-User", authUser); + headers.add("Token-Code", clusterInfo.getLinkisToken()); + + HttpEntity entity = new HttpEntity<>(headers); + LOGGER.info("Start to get columns by data source. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity); + Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody(); + LOGGER.info("Finish to get columns by data source. response: {}", response); + + if (! checkResponse(response)) { + String message = (String) response.get("message"); + LOGGER.error("Error! Can not get meta data from linkis, message: " + message); + throw new MetaDataAcquireFailedException("Error! 
Can not get meta data from linkis, exception: " + message); + } + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + DataInfo result = new DataInfo<>(); + List tables = (List) data.get("columns"); + if (CollectionUtils.isEmpty(tables)) { + LOGGER.info("No columns with data source to be choosed."); + } else { + List columnInfoDetailList = new ArrayList<>(tables.size()); + for (Map map : tables) { + ColumnInfoDetail columnInfoDetail = new ColumnInfoDetail(); + columnInfoDetail.setFieldName((String) map.get("name")); + columnInfoDetail.setDataType((String) map.get("type")); + columnInfoDetailList.add(columnInfoDetail); + } + result.setTotalCount(columnInfoDetailList.size()); + result.setContent(columnInfoDetailList); + } + return result; + } + + private ClusterInfo checkClusterNameExists(String clusterName) throws UnExpectedRequestException { ClusterInfo clusterInfo = clusterInfoDao.findByClusterName(clusterName); if (clusterInfo == null) { throw new UnExpectedRequestException(String.format("%s 集群名称不存在", clusterName)); @@ -300,4 +1074,5 @@ private boolean checkResponse(Map response) { Integer responseStatus = (Integer) response.get("status"); return responseStatus == 0; } + } diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/constant/RuleMetricBussCodeEnum.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/constant/RuleMetricBussCodeEnum.java new file mode 100644 index 00000000..3fea1c0a --- /dev/null +++ b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/constant/RuleMetricBussCodeEnum.java @@ -0,0 +1,45 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.constant; + +/** + * @author allenzhou + */ +public enum RuleMetricBussCodeEnum { + /** + * Dimension + */ + SUBSYSTEM(1, "子系统维度"), + PRODUCT(2, "二级产品维度"), + CUSTOM(3, "自定义维度"); + + private Integer code; + private String message; + + RuleMetricBussCodeEnum(Integer code, String message) { + this.code = code; + this.message = message; + } + + public Integer getCode() { + return code; + } + + public String getMessage() { + return message; + } +} diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/constant/RuleMetricLevelEnum.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/constant/RuleMetricLevelEnum.java new file mode 100644 index 00000000..e10081e3 --- /dev/null +++ b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/constant/RuleMetricLevelEnum.java @@ -0,0 +1,45 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.constant; + +/** + * @author allenzhou + */ +public enum RuleMetricLevelEnum { + /** + * Level for authority management + */ + DEFAULT_METRIC(1, "内置指标"), + DEPARTMENT_METRIC(2, "部门指标"), + PERSONAL_METRIC(3, "个人指标"); + + private Integer code; + private String message; + + RuleMetricLevelEnum(Integer code, String message) { + this.code = code; + this.message = message; + } + + public Integer getCode() { + return code; + } + + public String getMessage() { + return message; + } +} diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/RuleMetricDao.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/RuleMetricDao.java new file mode 100644 index 00000000..71dee3da --- /dev/null +++ b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/RuleMetricDao.java @@ -0,0 +1,188 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.dao; + +import com.webank.wedatasphere.qualitis.entity.Department; +import com.webank.wedatasphere.qualitis.entity.RuleMetric; +import com.webank.wedatasphere.qualitis.entity.User; +import java.util.List; + +/** + * @author allenzhou + */ +public interface RuleMetricDao { + /** + * Query pageable rule metrics with SYS_ADMIN. + * @param subSystemName + * @param ruleMetricName + * @param enCode + * @param type + * @param available + * @param page + * @param size + * @return + */ + List queryAllRuleMetrics(String subSystemName, String ruleMetricName, String enCode, Integer type, Boolean available, int page, + int size); + + /** + * Count query rule metrics with SYS_ADMIN. + * @param subSystemName + * @param ruleMetricName + * @param enCode + * @param type + * @param available + * @return + */ + long countQueryAllRuleMetrics(String subSystemName, String ruleMetricName, String enCode, Integer type, Boolean available); + + /** + * Query pageable rule metrics with different characters(DEPARTMENT_ADMIN, PROJECTOR). + * @param level + * @param departmentList + * @param user + * @param subSystemName + * @param ruleMetricName + * @param enCode + * @param type + * @param available + * @param page + * @param size + * @return + */ + List queryRuleMetrics(Integer level, List departmentList, + User user, String subSystemName, String ruleMetricName, String enCode, Integer type, Boolean available, int page, int size); + + /** + * Count query rule metrics with different characters(DEPARTMENT_ADMIN, PROJECTOR). + * @param level + * @param departmentList + * @param user + * @param subSystemName + * @param ruleMetricName + * @param enCode + * @param type + * @param available + * @return + */ + long countQueryRuleMetrics(Integer level, List departmentList, + User user, String subSystemName, String ruleMetricName, String enCode, Integer type, Boolean available); + + /** + * Query rule metrics with name. 
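+     * Matching is fuzzy: the DAO implementation wraps the given name with '%' wildcards before delegating to the repository's LIKE query.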
+ * @param level + * @param departmentList + * @param user + * @param name + * @param page + * @param size + * @return + */ + List findWithRuleMetricName(Integer level, List departmentList, User user, String name, int page, int size); + + /** + * Count of querying rule metrics with name. + * @param level + * @param departmentList + * @param user + * @param name + * @return + */ + long countWithRuleMetricName(Integer level, List departmentList, User user, String name); + + /** + * Find all rule metrics. + * @param page + * @param size + * @return + */ + List findAllRuleMetrics(int page, int size); + + /** + * Count all rule metrics. + * @return + */ + long countAllRuleMetrics(); + + /** + * Find pageable rule metrics with different characters(SYS_ADMIN, DEPARTMENT_ADMIN, PROJECTOR). + * @param level + * @param departmentList + * @param user + * @param page + * @param size + * @return + */ + List findRuleMetrics(Integer level, List departmentList, User user, int page, int size); + + /** + * Count all rule metrics with different characters(SYS_ADMIN, DEPARTMENT_ADMIN, PROJECTOR). + * @param level + * @param departmentList + * @param user + * @return + */ + long countRuleMetrics(Integer level, List departmentList, User user); + + /** + * Add + * @param ruleMetric + * @return + */ + RuleMetric add(RuleMetric ruleMetric); + + /** + * Modify + * @param ruleMetric + * @return + */ + RuleMetric modify(RuleMetric ruleMetric); + + /** + * Delete + * @param ruleMetric + */ + void delete(RuleMetric ruleMetric); + + /** + * Find by id. + * @param id + * @return + */ + RuleMetric findById(long id); + + /** + * Find by en code. + * @param name + * @return + */ + RuleMetric findByEnCode(String name); + + /** + * Find by IDs. + * @param ids + * @return + */ + List findByIds(List ids); + + /** + * Find by name. + * @param name + * @return + */ + RuleMetric findByName(String name); +} diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/RuleMetricDepartmentUserDao.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/RuleMetricDepartmentUserDao.java new file mode 100644 index 00000000..51954f88 --- /dev/null +++ b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/RuleMetricDepartmentUserDao.java @@ -0,0 +1,52 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.qualitis.dao; + +import com.webank.wedatasphere.qualitis.entity.RuleMetric; +import com.webank.wedatasphere.qualitis.entity.RuleMetricDepartmentUser; + +/** + * @author allenzhou + */ +public interface RuleMetricDepartmentUserDao { + /** + * Add + * @param ruleMetricDepartmentUser + * @return + */ + RuleMetricDepartmentUser add(RuleMetricDepartmentUser ruleMetricDepartmentUser); + + /** + * Modify + * @param ruleMetricDepartmentUser + * @return + */ + RuleMetricDepartmentUser modify(RuleMetricDepartmentUser ruleMetricDepartmentUser); + + /** + * Delete + * @param ruleMetricDepartmentUser + */ + void delete(RuleMetricDepartmentUser ruleMetricDepartmentUser); + + /** + * Find by rule metric. + * @param ruleMetricInDb + * @return + */ + RuleMetricDepartmentUser findByRuleMetric(RuleMetric ruleMetricInDb); +} diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/RuleMetricTypeConfigDao.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/RuleMetricTypeConfigDao.java new file mode 100644 index 00000000..a5283461 --- /dev/null +++ b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/RuleMetricTypeConfigDao.java @@ -0,0 +1,31 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.dao; + +import com.webank.wedatasphere.qualitis.entity.RuleMetricTypeConfig; +import java.util.List; + +/** + * @author allenzhou + */ +public interface RuleMetricTypeConfigDao { + /** + * Find all rule metric type config. 
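+     * The provided implementation returns the whole configuration table via the repository's findAll(), so no paging is applied here.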
+ * @return + */ + List findAllRuleMetricTypeConfig(); +} diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/RuleMetricDaoImpl.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/RuleMetricDaoImpl.java new file mode 100644 index 00000000..f16d1561 --- /dev/null +++ b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/RuleMetricDaoImpl.java @@ -0,0 +1,122 @@ +package com.webank.wedatasphere.qualitis.dao.impl; + +import com.webank.wedatasphere.qualitis.dao.RuleMetricDao; +import com.webank.wedatasphere.qualitis.dao.repository.RuleMetricRepository; +import com.webank.wedatasphere.qualitis.entity.Department; +import com.webank.wedatasphere.qualitis.entity.RuleMetric; +import com.webank.wedatasphere.qualitis.entity.User; +import java.util.List; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.stereotype.Repository; + +/** + * @author allenzhou + */ +@Repository +public class RuleMetricDaoImpl implements RuleMetricDao { + @Autowired + private RuleMetricRepository ruleMetricRepository; + + @Override + public List queryAllRuleMetrics(String subSystemName, String ruleMetricName, String enCode, Integer type, Boolean available, int page, int size) { + Sort sort = new Sort(Sort.Direction.ASC, "id"); + Pageable pageable = PageRequest.of(page, size, sort); + return ruleMetricRepository.queryAll(subSystemName, ruleMetricName, enCode, type, available, pageable).getContent(); + } + + @Override + public long countQueryAllRuleMetrics(String subSystemName, String ruleMetricName, String enCode, Integer type, Boolean available) { + return ruleMetricRepository.countQueryAll(subSystemName, ruleMetricName, enCode, type, available); + } + + @Override + public List queryRuleMetrics(Integer level, List departmentList, User user, String subSystemName, String ruleMetricName + , String enCode, Integer type, Boolean available, int page, int size) { + Sort sort = new Sort(Sort.Direction.ASC, "id"); + Pageable pageable = PageRequest.of(page, size, sort); + return ruleMetricRepository.queryRuleMetrics(level, departmentList, user, subSystemName, ruleMetricName, enCode, type, available, pageable).getContent(); + } + + @Override + public long countQueryRuleMetrics(Integer level, List departmentList, User user, String subSystemName, String ruleMetricName, String enCode + , Integer type, Boolean available) { + return ruleMetricRepository.countQueryRuleMetrics(level, departmentList, user, subSystemName, ruleMetricName, enCode, type, available); + } + + @Override + public List findWithRuleMetricName(Integer level, List departmentList, + User user, String name, int page, int size) { + Sort sort = new Sort(Sort.Direction.ASC, "id"); + Pageable pageable = PageRequest.of(page, size, sort); + return ruleMetricRepository.findWithRuleMetricName(level, departmentList, user, "%".concat(name).concat("%"), pageable).getContent(); + } + + @Override + public long countWithRuleMetricName(Integer level, List departmentList, + User user, String name) { + return ruleMetricRepository.countWithRuleMetricName(level, departmentList, user, "%".concat(name).concat("%")); + } + + @Override + public List findAllRuleMetrics(int page, int size) { + Sort sort = new Sort(Sort.Direction.ASC, "id"); + Pageable pageable = PageRequest.of(page, size, sort); + return 
ruleMetricRepository.findAll(pageable).getContent(); + } + + @Override + public long countAllRuleMetrics() { + return ruleMetricRepository.count(); + } + + @Override + public List findRuleMetrics(Integer level, List departmentList, + User user, int page, int size) { + Sort sort = new Sort(Sort.Direction.ASC, "id"); + Pageable pageable = PageRequest.of(page, size, sort); + return ruleMetricRepository.findRuleMetrics(level, departmentList, user, pageable).getContent(); + } + + @Override + public long countRuleMetrics(Integer level, List departmentList, User user) { + return ruleMetricRepository.countRuleMetrics(level, departmentList, user); + } + + @Override + public RuleMetric add(RuleMetric ruleMetric) { + return ruleMetricRepository.save(ruleMetric); + } + + @Override + public RuleMetric modify(RuleMetric ruleMetric) { + return ruleMetricRepository.save(ruleMetric); + } + + @Override + public void delete(RuleMetric ruleMetric) { + ruleMetricRepository.delete(ruleMetric); + } + + @Override + public RuleMetric findById(long id) { + return ruleMetricRepository.findById(id).get(); + } + + @Override + public RuleMetric findByEnCode(String enCode) { + return ruleMetricRepository.findByEnCode(enCode); + } + + @Override + public List findByIds(List ids) { + return ruleMetricRepository.findAllById(ids); + } + + @Override + public RuleMetric findByName(String name) { + return ruleMetricRepository.findByName(name); + } +} diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/RuleMetricDepartmentUserDaoImpl.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/RuleMetricDepartmentUserDaoImpl.java new file mode 100644 index 00000000..35f1adea --- /dev/null +++ b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/RuleMetricDepartmentUserDaoImpl.java @@ -0,0 +1,37 @@ +package com.webank.wedatasphere.qualitis.dao.impl; + +import com.webank.wedatasphere.qualitis.dao.RuleMetricDepartmentUserDao; +import com.webank.wedatasphere.qualitis.dao.repository.RuleMetricDepartmentUserRepository; +import com.webank.wedatasphere.qualitis.entity.RuleMetric; +import com.webank.wedatasphere.qualitis.entity.RuleMetricDepartmentUser; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Repository; + +/** + * @author allenzhou + */ +@Repository +public class RuleMetricDepartmentUserDaoImpl implements RuleMetricDepartmentUserDao { + @Autowired + private RuleMetricDepartmentUserRepository ruleMetricDepartmentUserRepository; + + @Override + public RuleMetricDepartmentUser add(RuleMetricDepartmentUser ruleMetricDepartmentUser) { + return ruleMetricDepartmentUserRepository.save(ruleMetricDepartmentUser); + } + + @Override + public RuleMetricDepartmentUser modify(RuleMetricDepartmentUser ruleMetricDepartmentUser) { + return ruleMetricDepartmentUserRepository.save(ruleMetricDepartmentUser); + } + + @Override + public void delete(RuleMetricDepartmentUser ruleMetricDepartmentUser) { + ruleMetricDepartmentUserRepository.delete(ruleMetricDepartmentUser); + } + + @Override + public RuleMetricDepartmentUser findByRuleMetric(RuleMetric ruleMetricInDb) { + return ruleMetricDepartmentUserRepository.findByRuleMetric(ruleMetricInDb); + } +} diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/RuleMetricTypeConfigDaoImpl.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/RuleMetricTypeConfigDaoImpl.java new file mode 100644 index 00000000..8c39cbcf --- /dev/null +++ 
b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/RuleMetricTypeConfigDaoImpl.java @@ -0,0 +1,22 @@ +package com.webank.wedatasphere.qualitis.dao.impl; + +import com.webank.wedatasphere.qualitis.dao.RuleMetricTypeConfigDao; +import com.webank.wedatasphere.qualitis.dao.repository.RuleMetricTypeConfigRepository; +import com.webank.wedatasphere.qualitis.entity.RuleMetricTypeConfig; +import java.util.List; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Repository; + +/** + * @author allenzhou + */ +@Repository +public class RuleMetricTypeConfigDaoImpl implements RuleMetricTypeConfigDao { + @Autowired + private RuleMetricTypeConfigRepository ruleMetricTypeConfigRepository; + + @Override + public List findAllRuleMetricTypeConfig() { + return ruleMetricTypeConfigRepository.findAll(); + } +} diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/RuleMetricDepartmentUserRepository.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/RuleMetricDepartmentUserRepository.java new file mode 100644 index 00000000..3c2bc639 --- /dev/null +++ b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/RuleMetricDepartmentUserRepository.java @@ -0,0 +1,50 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.dao.repository; + +import com.webank.wedatasphere.qualitis.entity.Department; +import com.webank.wedatasphere.qualitis.entity.RuleMetric; +import com.webank.wedatasphere.qualitis.entity.RuleMetricDepartmentUser; +import com.webank.wedatasphere.qualitis.entity.User; +import java.util.List; +import org.springframework.data.jpa.repository.JpaRepository; + +/** + * @author howeye + */ +public interface RuleMetricDepartmentUserRepository extends JpaRepository { + /** + * Find by user. + * @param user + * @return + */ + List findByUser(User user); + + /** + * Find by department. + * @param department + * @return + */ + List findByDepartment(Department department); + + /** + * Find by rule metric. + * @param ruleMetricInDb + * @return + */ + RuleMetricDepartmentUser findByRuleMetric(RuleMetric ruleMetricInDb); +} diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/RuleMetricRepository.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/RuleMetricRepository.java new file mode 100644 index 00000000..e30d2c78 --- /dev/null +++ b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/RuleMetricRepository.java @@ -0,0 +1,146 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.dao.repository; + +import com.webank.wedatasphere.qualitis.entity.Department; +import com.webank.wedatasphere.qualitis.entity.RuleMetric; +import com.webank.wedatasphere.qualitis.entity.User; +import java.util.List; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; + +/** + * @author allenzhou + */ +public interface RuleMetricRepository extends JpaRepository { + /** + * Query pageable rule metrics with SYS_ADMIN. + * @param subSystemName + * @param ruleMetricName + * @param enCode + * @param type + * @param available + * @param pageable + * @return + */ + @Query(value = "SELECT qrm FROM RuleMetric qrm where (LENGTH(?1) = 0 OR qrm.subSystemName = ?1) AND (LENGTH(?2) = 0 OR qrm.name LIKE ?2) AND (LENGTH(?3) = 0 or qrm.enCode = ?3) AND (?4 is null or qrm.type = ?4) AND (?5 is null or qrm.available = ?5)") + Page queryAll(String subSystemName, String ruleMetricName, String enCode, Integer type, Boolean available, Pageable pageable); + + /** + * Count query rule metrics with SYS_ADMIN. + * @param subSystemName + * @param ruleMetricName + * @param enCode + * @param type + * @param available + * @return + */ + @Query(value = "SELECT count(qrm.id) FROM RuleMetric qrm where (LENGTH(?1) = 0 OR qrm.subSystemName = ?1) AND (LENGTH(?2) = 0 OR qrm.name LIKE ?2) AND (LENGTH(?3) = 0 or qrm.enCode = ?3) AND (?4 is null or qrm.type = ?4) AND (?5 is null or qrm.available = ?5)") + long countQueryAll(String subSystemName, String ruleMetricName, String enCode, Integer type, Boolean available); + + /** + * Query pageable rule metrics with different characters(DEPARTMENT_ADMIN, PROJECTOR). + * @param level + * @param departmentList + * @param user + * @param subSystemName + * @param ruleMetricName + * @param enCode + * @param type + * @param available + * @param pageable + * @return + */ + @Query(value = "SELECT qrm FROM RuleMetric qrm where (qrm.level = ?1 OR (qrm IN (SELECT qrmdu.ruleMetric FROM RuleMetricDepartmentUser qrmdu where qrmdu.department in (?2) OR ?3 is null OR qrmdu.user = ?3))) AND (?4 = '' OR qrm.subSystemName = ?4) AND (?5 = '' OR qrm.name LIKE ?5) AND (?6 = '' OR qrm.enCode = ?6) AND (?7 is null OR qrm.type = ?7) AND (?8 is null or qrm.available = ?8)") + Page queryRuleMetrics(Integer level, List departmentList, User user, String subSystemName, String ruleMetricName, String enCode, Integer type, Boolean available, Pageable pageable); + + /** + * Count query rule metrics with different characters(DEPARTMENT_ADMIN, PROJECTOR). 
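+     * Uses the same WHERE clause as queryRuleMetrics, so the returned count stays consistent with the paged query's total.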
+ * @param level + * @param departmentList + * @param user + * @param subSystemName + * @param ruleMetricName + * @param enCode + * @param type + * @param available + * @return + */ + @Query(value = "SELECT count(qrm.id) FROM RuleMetric qrm where (qrm.level = ?1 OR (qrm IN (SELECT qrmdu.ruleMetric FROM RuleMetricDepartmentUser qrmdu where qrmdu.department in (?2) OR ?3 is null OR qrmdu.user = ?3))) AND (?4 = '' OR qrm.subSystemName = ?4) AND (?5 = '' OR qrm.name LIKE ?5) AND (?6 = '' OR qrm.enCode = ?6) AND (?7 is null OR qrm.type = ?7) AND (?8 is null or qrm.available = ?8)") + long countQueryRuleMetrics(Integer level, List departmentList, User user, String subSystemName, String ruleMetricName, String enCode, Integer type, Boolean available); + + /** + * Find pageable rule metrics with different characters(DEPARTMENT_ADMIN, PROJECTOR). + * @param level + * @param departmentList + * @param user + * @param pageable + * @return + */ + @Query(value = "SELECT qrm FROM RuleMetric qrm where qrm.level = ?1 OR (qrm IN (SELECT qrmdu.ruleMetric FROM RuleMetricDepartmentUser qrmdu where qrmdu.department in (?2) OR qrmdu.user = ?3))") + Page findRuleMetrics(Integer level, List departmentList, User user, Pageable pageable); + + /** + * Count all rule metrics with different characters(DEPARTMENT_ADMIN, PROJECTOR). + * @param level + * @param departmentList + * @param user + * @return + */ + @Query(value = "SELECT count(qrm.id) FROM RuleMetric qrm where qrm.level = ?1 OR (qrm IN (SELECT qrmdu.ruleMetric FROM RuleMetricDepartmentUser qrmdu where qrmdu.department in (?2) OR qrmdu.user = ?3))") + long countRuleMetrics(Integer level, List departmentList, User user); + + /** + * Query rule metrics with name. + * @param level + * @param departmentList + * @param user + * @param name + * @param pageable + * @return + */ + @Query(value = "SELECT qrm FROM RuleMetric qrm where qrm.level = ?1 AND qrm.name LIKE ?4 OR (qrm IN (SELECT qrmdu.ruleMetric FROM RuleMetricDepartmentUser qrmdu where qrmdu.department in (?2) OR qrmdu.user IN (?3)))") + Page findWithRuleMetricName(Integer level, List departmentList, User user, String name, Pageable pageable); + + /** + * Count of querying rule metrics with name. + * @param level + * @param departmentList + * @param user + * @param name + * @return + */ + @Query(value = "SELECT count(qrm.id) FROM RuleMetric qrm where qrm.level = ?1 AND qrm.name LIKE ?4 OR (qrm IN (SELECT qrmdu.ruleMetric FROM RuleMetricDepartmentUser qrmdu where qrmdu.department in (?2) OR qrmdu.user IN (?3)))") + long countWithRuleMetricName(Integer level, List departmentList, User user, String name); + + /** + * Find by name. + * @param name + * @return + */ + RuleMetric findByName(String name); + + /** + * Find by en code. + * @param enCode + * @return + */ + @Query(value = "SELECT qrm FROM RuleMetric qrm where qrm.enCode = ?1") + RuleMetric findByEnCode(String enCode); +} diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/RuleMetricTypeConfigRepository.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/RuleMetricTypeConfigRepository.java new file mode 100644 index 00000000..03727c57 --- /dev/null +++ b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/RuleMetricTypeConfigRepository.java @@ -0,0 +1,26 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.dao.repository; + +import com.webank.wedatasphere.qualitis.entity.RuleMetricTypeConfig; +import org.springframework.data.jpa.repository.JpaRepository; + +/** + * @author allenzhou + */ +public interface RuleMetricTypeConfigRepository extends JpaRepository { +} diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/entity/RuleMetric.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/entity/RuleMetric.java new file mode 100644 index 00000000..5a48ecc9 --- /dev/null +++ b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/entity/RuleMetric.java @@ -0,0 +1,292 @@ +package com.webank.wedatasphere.qualitis.entity; + +import com.webank.wedatasphere.qualitis.constant.RuleMetricBussCodeEnum; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; + +/** + * @author allenzhou + */ +@Entity +@Table(name = "qualitis_rule_metric") +public class RuleMetric { + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @Column(name = "name") + private String name; + @Column(name = "cn_name") + private String cnName; + + @Column(name = "metric_desc") + private String metricDesc; + + @Column(name = "sub_system_name") + private String subSystemName; + @Column(name = "full_cn_name") + private String fullCnName; + + @Column(name = "product_name") + private String productName; + + @Column(name = "department_name") + private String departmentName; + + + @Column(name = "dev_department_name") + private String devDepartmentName; + @Column(name = "ops_department_name") + private String opsDepartmentName; + + @Column(name = "metric_level") + private Integer level; + + @Column(name = "create_user", length = 50) + private String createUser; + @Column(name = "create_time", length = 25) + private String createTime; + @Column(name = "modify_user", length = 50) + private String modifyUser; + @Column(name = "modify_time", length = 25) + private String modifyTime; + + @Column(name = "type") + private Integer type; + @Column(name = "en_code") + private String enCode; + + @Column(name = "available") + private Boolean available; + @Column(name = "frequency") + private Integer frequency; + + @Column(name = "buss_code") + private Integer bussCode; + @Column(name = "buss_custom") + private String bussCustom; + + public RuleMetric(String name, String cnName, String desc, String subSystemName, String fullCnName, String productName, String departmentName + , String devDepartmentName, String opsDepartmentName, Integer type, String enCode, Integer frequency, Boolean available, Integer bussCode + , String bussCustom) { + this.name = name; + this.cnName = cnName; + this.metricDesc = desc; + this.bussCode = bussCode; + + if (RuleMetricBussCodeEnum.SUBSYSTEM.getCode().equals(bussCode)) { + this.subSystemName = subSystemName; + this.fullCnName = fullCnName; + } else if (RuleMetricBussCodeEnum.PRODUCT.getCode().equals(bussCode)) { + this.productName = 
productName; + } else if (RuleMetricBussCodeEnum.CUSTOM.getCode().equals(bussCode)) { + this.bussCustom = bussCustom; + } + this.departmentName = departmentName; + this.devDepartmentName = devDepartmentName; + this.opsDepartmentName = opsDepartmentName; + + this.type = type; + this.enCode = enCode; + this.frequency = frequency; + this.available = available; + } + + public RuleMetric() { + + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getCnName() { + return cnName; + } + + public void setCnName(String cnName) { + this.cnName = cnName; + } + + public String getMetricDesc() { + return metricDesc; + } + + public void setMetricDesc(String metricDesc) { + this.metricDesc = metricDesc; + } + + + public String getSubSystemName() { + return subSystemName; + } + + public void setSubSystemName(String subSystemName) { + this.subSystemName = subSystemName; + } + + public String getFullCnName() { + return fullCnName; + } + + public void setFullCnName(String fullCnName) { + this.fullCnName = fullCnName; + } + + public String getProductName() { + return productName; + } + + public void setProductName(String productName) { + this.productName = productName; + } + + public String getDepartmentName() { + return departmentName; + } + + public void setDepartmentName(String departmentName) { + this.departmentName = departmentName; + } + + public String getDevDepartmentName() { + return devDepartmentName; + } + + public void setDevDepartmentName(String devDepartmentName) { + this.devDepartmentName = devDepartmentName; + } + + public String getOpsDepartmentName() { + return opsDepartmentName; + } + + public void setOpsDepartmentName(String opsDepartmentName) { + this.opsDepartmentName = opsDepartmentName; + } + + public Integer getLevel() { + return level; + } + + public void setLevel(Integer level) { + this.level = level; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + public String getCreateTime() { + return createTime; + } + + public void setCreateTime(String createTime) { + this.createTime = createTime; + } + + public String getModifyUser() { + return modifyUser; + } + + public void setModifyUser(String modifyUser) { + this.modifyUser = modifyUser; + } + + public String getModifyTime() { + return modifyTime; + } + + public void setModifyTime(String modifyTime) { + this.modifyTime = modifyTime; + } + + public Integer getType() { + return type; + } + + public void setType(Integer type) { + this.type = type; + } + + public String getEnCode() { + return enCode; + } + + public void setEnCode(String enCode) { + this.enCode = enCode; + } + + public Boolean getAvailable() { + return available; + } + + public void setAvailable(Boolean available) { + this.available = available; + } + + public Integer getFrequency() { + return frequency; + } + + public void setFrequency(Integer frequency) { + this.frequency = frequency; + } + + public Integer getBussCode() { + return bussCode; + } + + public void setBussCode(Integer bussCode) { + this.bussCode = bussCode; + } + + public String getBussCustom() { + return bussCustom; + } + + public void setBussCustom(String bussCustom) { + this.bussCustom = bussCustom; + } + + @Override + public String toString() { + return "RuleMetric{" + + "id=" + id + + ", name='" + name + '\'' + + ", cnName='" + 
cnName + '\'' + + ", metricDesc='" + metricDesc + '\'' + + ", departmentName='" + departmentName + '\'' + + ", devDepartmentName='" + devDepartmentName + '\'' + + ", opsDepartmentName='" + opsDepartmentName + '\'' + + ", level=" + level + + ", createUser='" + createUser + '\'' + + ", createTime='" + createTime + '\'' + + ", type='" + type + '\'' + + ", enCode='" + enCode + '\'' + + ", available=" + available + + ", frequency=" + frequency + + ", bussCode=" + bussCode + + '}'; + } +} diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/entity/RuleMetricDepartmentUser.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/entity/RuleMetricDepartmentUser.java new file mode 100644 index 00000000..d1c9b202 --- /dev/null +++ b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/entity/RuleMetricDepartmentUser.java @@ -0,0 +1,72 @@ +package com.webank.wedatasphere.qualitis.entity; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.ManyToOne; +import javax.persistence.OneToOne; +import javax.persistence.Table; + +/** + * @author allenzhou + */ +@Entity +@Table(name = "qualitis_rule_metric_department_user") +public class RuleMetricDepartmentUser { + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @ManyToOne + private Department department; + + @ManyToOne + private User user; + + @OneToOne + private RuleMetric ruleMetric; + + public RuleMetricDepartmentUser() { + } + + public RuleMetricDepartmentUser(Department department, + User user, RuleMetric ruleMetric) { + this.department = department; + this.user = user; + this.ruleMetric = ruleMetric; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Department getDepartment() { + return department; + } + + public void setDepartment(Department department) { + this.department = department; + } + + public User getUser() { + return user; + } + + public void setUser(User user) { + this.user = user; + } + + public RuleMetric getRuleMetric() { + return ruleMetric; + } + + public void setRuleMetric(RuleMetric ruleMetric) { + this.ruleMetric = ruleMetric; + } +} diff --git a/core/metric/src/main/java/com/webank/wedatasphere/qualitis/entity/RuleMetricTypeConfig.java b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/entity/RuleMetricTypeConfig.java new file mode 100644 index 00000000..8cef7218 --- /dev/null +++ b/core/metric/src/main/java/com/webank/wedatasphere/qualitis/entity/RuleMetricTypeConfig.java @@ -0,0 +1,75 @@ +package com.webank.wedatasphere.qualitis.entity; + +import java.util.Objects; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; + +/** + * @author allenzhou + */ +@Entity +@Table(name = "qualitis_rule_metric_type_config") +public class RuleMetricTypeConfig { + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + @Column(name = "cn_name") + private String cnName; + @Column(name = "en_name") + private String enName; + + public RuleMetricTypeConfig() { + } + + public RuleMetricTypeConfig(String cnName, String enName) { + this.cnName = cnName; + this.enName = enName; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + 
public String getCnName() { + return cnName; + } + + public void setCnName(String cnName) { + this.cnName = cnName; + } + + public String getEnName() { + return enName; + } + + public void setEnName(String enName) { + this.enName = enName; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RuleMetricTypeConfig that = (RuleMetricTypeConfig) o; + return Objects.equals(id, that.id) && + Objects.equals(cnName, that.cnName) && + Objects.equals(enName, that.enName); + } + + @Override + public int hashCode() { + return Objects.hash(id, cnName, enName); + } +} diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/bean/JobChecker.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/bean/JobChecker.java index a1a91278..cc4d5dbd 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/bean/JobChecker.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/bean/JobChecker.java @@ -22,11 +22,11 @@ * @author howeye */ public class JobChecker implements Comparable { - private String applicationId; private String username; private Integer count; private String oldStatus; + private Double oldProgress; private String ujesAddress; private String clusterName; private Task task; @@ -41,7 +41,18 @@ public JobChecker(String applicationId, String oldStatus, String username, Strin this.task = task; } - public Integer getTaskId() { + public JobChecker(String applicationId, String oldStatus, Double oldProgress, String username, String ujesAddress, String clusterName, Task task) { + this.applicationId = applicationId; + this.oldStatus = oldStatus; + this.oldProgress = oldProgress; + this.username = username; + count = 0; + this.ujesAddress = ujesAddress; + this.clusterName = clusterName; + this.task = task; + } + + public Long getTaskId() { return task.getTaskRemoteId(); } @@ -65,6 +76,14 @@ public void setOldStatus(String oldStatus) { this.oldStatus = oldStatus; } + public Double getOldProgress() { + return oldProgress; + } + + public void setOldProgress(Double oldProgress) { + this.oldProgress = oldProgress; + } + public void addCount() { count++; } diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/config/ThreadPoolConfig.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/config/ThreadPoolConfig.java index 9f42091f..c3142079 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/config/ThreadPoolConfig.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/config/ThreadPoolConfig.java @@ -24,11 +24,12 @@ */ @Configuration public class ThreadPoolConfig { - @Value("${timer.thread.size}") private Integer size; @Value("${timer.check.period}") private Integer period; + @Value("${timer.check.update_job_size}") + private Integer updateJobSize; @Value("${timer.lock.zk.path}") private String lockZkPath; @@ -59,4 +60,12 @@ public String getLockZkPath() { public void setLockZkPath(String lockZkPath) { this.lockZkPath = lockZkPath; } + + public Integer getUpdateJobSize() { + return updateJobSize; + } + + public void setUpdateJobSize(Integer updateJobSize) { + this.updateJobSize = updateJobSize; + } } diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/AbstractServiceCoordinator.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/AbstractServiceCoordinator.java index 0c2bccbf..5b5cf950 100644 --- 
a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/AbstractServiceCoordinator.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/AbstractServiceCoordinator.java @@ -4,7 +4,6 @@ * @author howeye */ public abstract class AbstractServiceCoordinator { - /** * Do some init work */ diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/HaAbstractServiceCoordinator.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/HaAbstractServiceCoordinator.java index b35ad4bb..881bf27a 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/HaAbstractServiceCoordinator.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/HaAbstractServiceCoordinator.java @@ -21,9 +21,6 @@ @Component @ConditionalOnProperty(name = "ha.enable", havingValue = "true") public class HaAbstractServiceCoordinator extends AbstractServiceCoordinator { - - private static final Logger LOGGER = LoggerFactory.getLogger(HaAbstractServiceCoordinator.class); - @Autowired private ZkConfig zkConfig; @@ -34,6 +31,8 @@ public class HaAbstractServiceCoordinator extends AbstractServiceCoordinator { private InterProcessLock lock; private boolean lockFlag = false; + private static final Logger LOGGER = LoggerFactory.getLogger(HaAbstractServiceCoordinator.class); + @Override public void init() { LOGGER.info("Start to create zookeeper client"); @@ -60,8 +59,12 @@ public void release() { lockFlag = false; try { lock.release(); + } catch (IllegalMonitorStateException e) { + LOGGER.error("Failed to release lock of zookeeper."); + LOGGER.error(e.getMessage(), e); } catch (Exception e) { - LOGGER.error("Failed to release lock of zookeeper", e); + LOGGER.error("Failed to release lock of zookeeper."); + LOGGER.error(e.getMessage(), e); } } } diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/StandAloneAbstractServiceCoordinator.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/StandAloneAbstractServiceCoordinator.java index 1f6907b1..b4cad4da 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/StandAloneAbstractServiceCoordinator.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/StandAloneAbstractServiceCoordinator.java @@ -9,7 +9,6 @@ @Component @ConditionalOnProperty(name = "ha.enable", havingValue = "false") public class StandAloneAbstractServiceCoordinator extends AbstractServiceCoordinator { - @Override public void init() { // No need to init diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/job/MonitorManager.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/job/MonitorManager.java index bd613a61..0c0887ce 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/job/MonitorManager.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/job/MonitorManager.java @@ -24,13 +24,12 @@ import com.webank.wedatasphere.qualitis.exception.ClusterInfoNotConfigException; import com.webank.wedatasphere.qualitis.exception.LogPartialException; import com.webank.wedatasphere.qualitis.exception.TaskNotExistException; +import java.util.Map; /** * @author howeye */ public interface MonitorManager { - - /** * Get task status * @param taskId @@ -41,7 +40,7 @@ public interface MonitorManager { * @throws TaskNotExistException * @throws ClusterInfoNotConfigException */ - String getTaskStatus(Integer taskId, String user, String remoteAddress, String clusterName) throws TaskNotExistException, 
ClusterInfoNotConfigException; + Map getTaskStatus(Long taskId, String user, String remoteAddress, String clusterName) throws TaskNotExistException, ClusterInfoNotConfigException; /** @@ -55,6 +54,6 @@ public interface MonitorManager { * @throws LogPartialException * @throws ClusterInfoNotConfigException */ - LogResult getTaskPartialLog(Integer taskId, Integer begin, String user, String remoteAddress, String clusterName) throws LogPartialException, ClusterInfoNotConfigException; + LogResult getTaskPartialLog(Long taskId, Integer begin, String user, String remoteAddress, String clusterName) throws LogPartialException, ClusterInfoNotConfigException; } diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/job/impl/MonitorManagerImpl.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/job/impl/MonitorManagerImpl.java index 61053743..a6206039 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/job/impl/MonitorManagerImpl.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/job/impl/MonitorManagerImpl.java @@ -22,7 +22,7 @@ import com.webank.wedatasphere.qualitis.exception.TaskNotExistException; import com.webank.wedatasphere.qualitis.exception.LogPartialException; import com.webank.wedatasphere.qualitis.job.MonitorManager; -import com.webank.wedatasphere.qualitis.job.MonitorManager; +import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -31,17 +31,16 @@ */ @Component public class MonitorManagerImpl implements MonitorManager { - @Autowired private AbstractJobSubmitter abstractJobSubmitter; @Override - public String getTaskStatus(Integer taskId, String user, String remoteAddress, String clusterName) throws TaskNotExistException, ClusterInfoNotConfigException { - return abstractJobSubmitter.getTaskStatus(taskId, user, remoteAddress, clusterName); + public Map getTaskStatus(Long taskId, String user, String remoteAddress, String clusterName) throws TaskNotExistException, ClusterInfoNotConfigException { + return abstractJobSubmitter.getTaskStatusAndProgressAndErrCode(taskId, user, remoteAddress, clusterName); } @Override - public LogResult getTaskPartialLog(Integer taskId, Integer begin, String user, String remoteAddress, String clusterName) throws LogPartialException, ClusterInfoNotConfigException { + public LogResult getTaskPartialLog(Long taskId, Integer begin, String user, String remoteAddress, String clusterName) throws LogPartialException, ClusterInfoNotConfigException { return abstractJobSubmitter.getJobPartialLog(taskId, begin, user, remoteAddress, clusterName); } } diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/CheckerRunnable.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/CheckerRunnable.java index af57c235..1f5ee8ac 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/CheckerRunnable.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/CheckerRunnable.java @@ -17,6 +17,7 @@ package com.webank.wedatasphere.qualitis.timer; import com.webank.wedatasphere.qualitis.bean.JobChecker; +import com.webank.wedatasphere.qualitis.constant.ApplicationCommentEnum; import com.webank.wedatasphere.qualitis.constant.ApplicationStatusEnum; import com.webank.wedatasphere.qualitis.constant.TaskStatusEnum; @@ -25,6 +26,11 @@ import com.webank.wedatasphere.qualitis.entity.Application; import com.webank.wedatasphere.qualitis.entity.Task; import 
com.webank.wedatasphere.qualitis.ha.AbstractServiceCoordinator; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -36,19 +42,30 @@ * @author howeye */ public class CheckerRunnable implements Runnable { - - private ApplicationDao applicationDao; private TaskDao taskDao; + private int updateJobSize; private IChecker iChecker; + private ApplicationDao applicationDao; + private static final ThreadPoolExecutor POOL; private AbstractServiceCoordinator abstractServiceCoordinator; private static final Logger LOGGER = LoggerFactory.getLogger("monitor"); + static { + POOL = new ThreadPoolExecutor(50, + Integer.MAX_VALUE, + 60, + TimeUnit.SECONDS, + new ArrayBlockingQueue<>(1000), + new UpdaterThreadFactory(), + new ThreadPoolExecutor.DiscardPolicy()); + } - public CheckerRunnable(ApplicationDao applicationDao, TaskDao taskDao, IChecker iChecker, AbstractServiceCoordinator abstractServiceCoordinator) { + public CheckerRunnable(ApplicationDao applicationDao, TaskDao taskDao, IChecker iChecker, AbstractServiceCoordinator abstractServiceCoordinator, int updateSize) { this.applicationDao = applicationDao; this.taskDao = taskDao; this.iChecker = iChecker; + this.updateJobSize = updateSize; this.abstractServiceCoordinator = abstractServiceCoordinator; abstractServiceCoordinator.init(); @@ -57,7 +74,7 @@ public CheckerRunnable(ApplicationDao applicationDao, TaskDao taskDao, IChecker @Override public void run() { try { - LOGGER.info("Start to monitor application"); + LOGGER.info("Start to monitor application."); abstractServiceCoordinator.coordinate(); // Get task that is not finished @@ -65,20 +82,27 @@ public void run() { try { jobs = getJobs(); LOGGER.info("Succeed to find applications that are not end. 
Application: {}", jobs); - } catch (Exception t) { - LOGGER.error("Failed to find applications that are not end", t); + } catch (Exception e) { + LOGGER.error("Failed to find applications that are not end.", e); return; } - - for (JobChecker jobChecker : jobs) { - try { - iChecker.checkTaskStatus(jobChecker); - } catch (Exception t) { - LOGGER.error("Failed to check task status, application_id: {}, task_id: {}", jobChecker.getApplicationId(), jobChecker.getTaskId(), t); + int total = jobs.size(); + int updateThreadSize = total / updateJobSize + 1; + CountDownLatch latch = new CountDownLatch(updateThreadSize); + + for (int indexThread = 0; total > 0 && indexThread < total;) { + if (indexThread + updateJobSize < total) { + POOL.execute(new UpdaterRunnable(iChecker, jobs.subList(indexThread, indexThread + updateJobSize), latch)); + } else { + POOL.execute(new UpdaterRunnable(iChecker, jobs.subList(indexThread, total), latch)); } + indexThread += updateJobSize; + updateThreadSize --; } - - LOGGER.info("Finish to monitor application"); + if (total > 0 && updateThreadSize == 0) { + latch.await(); + } + LOGGER.info("Finish to monitor application."); } catch (Exception e) { LOGGER.error("Failed to monitor application, caused by: {}", e.getMessage(), e); } finally { @@ -96,35 +120,45 @@ public void run() { private List getJobs() { List notEndApplications = applicationDao.findByStatusNotIn(END_APPLICATION_STATUS_LIST); - List jobCheckers = new ArrayList<>(); for (Application app : notEndApplications) { // Find not end task List notEndTasks = taskDao.findByApplicationAndStatusInAndTaskRemoteIdNotNull(app, NOT_END_TASK_STATUS_LIST); for (Task task : notEndTasks) { - JobChecker tmp = new JobChecker(app.getId(), TaskStatusEnum.getTaskStateByCode(task.getStatus()), app.getExecuteUser(), task.getSubmitAddress(), task.getClusterId(), task); + JobChecker tmp = new JobChecker(app.getId(), TaskStatusEnum.getTaskStateByCode(task.getStatus()), task.getProgress(), StringUtils.isNotBlank(task.getTaskProxyUser()) ? 
task.getTaskProxyUser() : app.getExecuteUser(), task.getSubmitAddress(), task.getClusterName(), task); jobCheckers.add(tmp); } if (notEndTasks.isEmpty()) { - LOGGER.info("Find abnormal application, which tasks is all end, but application is not end"); - LOGGER.info("Start to recover application status"); + LOGGER.info("Find abnormal application, which tasks is all end, but application is not end."); List allTasks = taskDao.findByApplication(app); + app.resetTask(); - for (Task task : allTasks) { - if (task.getStatus().equals(TaskStatusEnum.FAILED.getCode())) { - iChecker.checkIfLastJob(app.getId(), false, false, false); - } else if (task.getStatus().equals(TaskStatusEnum.FAIL_CHECKOUT.getCode())) { - iChecker.checkIfLastJob(app.getId(), true, false, false); - } else if (task.getStatus().equals(TaskStatusEnum.PASS_CHECKOUT.getCode())) { - iChecker.checkIfLastJob(app.getId(), true, true, false); - } else if (task.getStatus().equals(TaskStatusEnum.TASK_NOT_EXIST.getCode())) { - iChecker.checkIfLastJob(app.getId(), false, false, true); - } else if (task.getStatus().equals(TaskStatusEnum.CANCELLED.getCode())) { - iChecker.checkIfLastJob(app.getId(), false, false, false); + applicationDao.saveApplication(app); + LOGGER.info("Finish to reset application status num."); + + LOGGER.info("Start to recover application status."); + try { + for (Task task : allTasks) { + if (task.getStatus().equals(TaskStatusEnum.FAILED.getCode())) { + iChecker.checkIfLastJob(app, false, false, false); + } else if (task.getAbortOnFailure() != null && !task.getAbortOnFailure() && task.getStatus() + .equals(TaskStatusEnum.FAIL_CHECKOUT.getCode())) { + iChecker.checkIfLastJob(app, true, false, false); + } else if (task.getStatus().equals(TaskStatusEnum.PASS_CHECKOUT.getCode())) { + iChecker.checkIfLastJob(app, true, true, false); + } else if (task.getStatus().equals(TaskStatusEnum.TASK_NOT_EXIST.getCode())) { + iChecker.checkIfLastJob(app, false, false, true); + } else if (task.getStatus().equals(TaskStatusEnum.CANCELLED.getCode())) { + app.setApplicationComment(ApplicationCommentEnum.TIMEOUT_KILL.getCode()); + iChecker.checkIfLastJob(app, false, false, false); + } } + LOGGER.info("Succeed to recover application status."); + } catch (Exception e) { + LOGGER.error("Failed to recover applications that are not end."); + LOGGER.error(e.getMessage(), e); } - LOGGER.info("Succeed to recover application status"); } } diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/IChecker.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/IChecker.java index 5937e767..1facb11b 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/IChecker.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/IChecker.java @@ -17,6 +17,7 @@ package com.webank.wedatasphere.qualitis.timer; import com.webank.wedatasphere.qualitis.bean.JobChecker; +import com.webank.wedatasphere.qualitis.entity.Application; import com.webank.wedatasphere.qualitis.exception.ClusterInfoNotConfigException; import com.webank.wedatasphere.qualitis.bean.JobChecker; @@ -24,21 +25,19 @@ * @author howeye */ public interface IChecker { - /** * Check status of task * @param jobChecker * @throws ClusterInfoNotConfigException */ - void checkTaskStatus(JobChecker jobChecker) throws ClusterInfoNotConfigException; + void checkTaskStatus(JobChecker jobChecker); /** * Check if last job - * @param applicationId + * @param applicationInDb * @param finish * @param isPass * @param isNotExist */ - void 
checkIfLastJob(String applicationId, boolean finish, boolean isPass, boolean isNotExist); - + void checkIfLastJob(Application applicationInDb, boolean finish, boolean isPass, boolean isNotExist); } diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/JobCheckerTimer.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/JobCheckerTimer.java index 8c134a9a..db0a1bf4 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/JobCheckerTimer.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/JobCheckerTimer.java @@ -22,6 +22,8 @@ import com.webank.wedatasphere.qualitis.dao.TaskDao; import com.webank.wedatasphere.qualitis.ha.AbstractServiceCoordinator; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler; +import org.springframework.scheduling.support.CronTrigger; import org.springframework.stereotype.Component; import java.util.concurrent.ScheduledExecutorService; @@ -35,7 +37,6 @@ */ @Component public class JobCheckerTimer { - @Autowired private ThreadPoolConfig threadPoolConfig; @Autowired @@ -50,7 +51,9 @@ public class JobCheckerTimer { @PostConstruct public void init() { ScheduledExecutorService executor = new ScheduledThreadPoolExecutor(threadPoolConfig.getSize(), new MonitoryThreadFactory()); - executor.scheduleWithFixedDelay(new CheckerRunnable(applicationDao, taskDao, iChecker, abstractServiceCoordinator), 0, threadPoolConfig.getPeriod(), TimeUnit.MILLISECONDS); + executor.scheduleWithFixedDelay( + new CheckerRunnable(applicationDao, taskDao, iChecker, abstractServiceCoordinator, threadPoolConfig.getUpdateJobSize()), + 0, threadPoolConfig.getPeriod(), TimeUnit.MILLISECONDS); } } diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/TaskChecker.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/TaskChecker.java index b4ab2614..30edef5d 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/TaskChecker.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/TaskChecker.java @@ -17,27 +17,38 @@ package com.webank.wedatasphere.qualitis.timer; import com.webank.wedatasphere.qualitis.bean.JobChecker; +import com.webank.wedatasphere.qualitis.config.LinkisConfig; import com.webank.wedatasphere.qualitis.constant.AlarmConfigStatusEnum; +import com.webank.wedatasphere.qualitis.constant.ApplicationCommentEnum; import com.webank.wedatasphere.qualitis.constant.ApplicationStatusEnum; import com.webank.wedatasphere.qualitis.constant.TaskStatusEnum; -import com.webank.wedatasphere.qualitis.dao.*; -import com.webank.wedatasphere.qualitis.entity.*; -import com.webank.wedatasphere.qualitis.exception.ClusterInfoNotConfigException; -import com.webank.wedatasphere.qualitis.exception.TaskNotExistException; -import com.webank.wedatasphere.qualitis.job.MonitorManager; -import com.webank.wedatasphere.qualitis.util.PassUtil; import com.webank.wedatasphere.qualitis.dao.ApplicationDao; +import com.webank.wedatasphere.qualitis.dao.RuleMetricDao; import com.webank.wedatasphere.qualitis.dao.TaskDao; +import com.webank.wedatasphere.qualitis.dao.TaskDataSourceDao; +import com.webank.wedatasphere.qualitis.dao.TaskResultDao; +import com.webank.wedatasphere.qualitis.dao.TaskRuleSimpleDao; +import com.webank.wedatasphere.qualitis.dao.UserDao; import com.webank.wedatasphere.qualitis.entity.Application; import com.webank.wedatasphere.qualitis.entity.Task; +import 
com.webank.wedatasphere.qualitis.entity.TaskResult; import com.webank.wedatasphere.qualitis.entity.TaskRuleAlarmConfig; import com.webank.wedatasphere.qualitis.entity.TaskRuleSimple; -import com.webank.wedatasphere.qualitis.bean.JobChecker; -import com.webank.wedatasphere.qualitis.dao.ApplicationDao; -import com.webank.wedatasphere.qualitis.dao.TaskDao; +import com.webank.wedatasphere.qualitis.exception.ClusterInfoNotConfigException; +import com.webank.wedatasphere.qualitis.exception.JobKillException; import com.webank.wedatasphere.qualitis.exception.TaskNotExistException; +import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; import com.webank.wedatasphere.qualitis.job.MonitorManager; +import com.webank.wedatasphere.qualitis.rule.constant.RuleTemplateTypeEnum; +import com.webank.wedatasphere.qualitis.rule.dao.RuleDao; +import com.webank.wedatasphere.qualitis.rule.dao.RuleGroupDao; +import com.webank.wedatasphere.qualitis.submitter.ExecutionManager; import com.webank.wedatasphere.qualitis.util.PassUtil; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; @@ -46,52 +57,163 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.transaction.annotation.Transactional; - -import java.util.*; +import org.springframework.web.client.ResourceAccessException; /** * @author howeye */ @Component public class TaskChecker implements IChecker { - @Autowired private MonitorManager monitorManager; @Autowired private TaskDao taskDao; @Autowired + private TaskResultDao taskResultDao; + @Autowired private ApplicationDao applicationDao; @Autowired - private TaskResultDao taskResultDao; + private TaskDataSourceDao taskDataSourceDao; + @Autowired + private RuleMetricDao ruleMetricDao; + @Autowired + private RuleGroupDao ruleGroupDao; + @Autowired + private RuleDao ruleDao; + @Autowired + private LinkisConfig linkisConfig; + @Autowired + private ExecutionManager executionManager; + @Autowired + private TaskRuleSimpleDao taskRuleSimpleDao; + @Autowired + private UserDao userDao; private static final String PRINT_TIME_PATTERN = "yyyy-MM-dd HH:mm:ss"; - private static final DateTimeFormatter PRINT_TIME_FORMAT = DateTimeFormat.forPattern(PRINT_TIME_PATTERN); private static final Logger LOGGER = LoggerFactory.getLogger(TaskChecker.class); + private static final DateTimeFormatter PRINT_TIME_FORMAT = DateTimeFormat.forPattern(PRINT_TIME_PATTERN); + + private static final Map ERR_CODE_TYPE = new HashMap(){{ + put(60075,2); + put(10001,2); + put(20001,3); + put(20002,3); + put(20003,3); + put(20083,3); + put(70059,3); + put(11011,3); + put(11012,3); + put(11013,3); + put(11014,3); + put(11015,3); + put(11016,3); + put(11017,3); + put(60035,3); + put(21304,3); + put(30001,1); + put(60010,1); + put(40001,4); + put(40002,10); + put(40004,4); + put(40003,4); + put(40005,4); + put(50001,4); + put(50002,4); + put(50003,4); + put(50004,4); + put(50005,4); + put(50007,4); + put(50012,4); + put(50013,4); + put(50014,4); + put(50017,4); + put(50019,4); + put(60003,4); + put(11017,3); + put(60035,3); + put(60075,2); + put(95002,1); + put(95003,1); + put(95004,1); + put(95006,1); + put(60079,1); + put(30002,5); + put(50007,4); + }}; @Override @Transactional(rollbackFor = Exception.class) - public void 
checkTaskStatus(JobChecker jobChecker) throws ClusterInfoNotConfigException { + public void checkTaskStatus(JobChecker jobChecker) { try { - String jobStatus; - jobStatus = monitorManager.getTaskStatus(jobChecker.getTaskId(), jobChecker.getUsername(), jobChecker.getUjesAddress(), jobChecker.getClusterName()).toUpperCase(); + Map taskInfos = monitorManager.getTaskStatus(jobChecker.getTaskId(), jobChecker.getUsername(), + jobChecker.getUjesAddress(), jobChecker.getClusterName()); + String jobStatus = ((String) taskInfos.get("status")).toUpperCase(); + Integer errCode = (Integer) taskInfos.get("errCode"); + LOGGER.info("Task status: {}", jobStatus); - if (!jobStatus.equals(jobChecker.getOldStatus())) { + if (! jobStatus.equals(jobChecker.getOldStatus())) { LOGGER.info("Start to update task status. old status: {}, new status: {}, task_id: {}", jobChecker.getOldStatus(), jobStatus, jobChecker.getTaskId()); - writeDb(jobChecker, jobStatus); + writeDb(jobChecker, jobStatus, errCode); LOGGER.info("Succeed to update task status. old status: {}, new status: {}, task_id: {}", jobChecker.getOldStatus(), jobStatus, jobChecker.getTaskId()); } + + // Compute task time in same progress. + if (linkisConfig.getKillStuckTasks() && TaskStatusEnum.RUNNING.getState().equals(jobStatus)) { + Task taskInDb = taskDao.findByRemoteTaskIdAndClusterName(jobChecker.getTaskId(), jobChecker.getClusterName()); + Double progress = (Double) taskInfos.get("progress"); + LOGGER.info("Old time progress[{}].", jobChecker.getOldProgress()); + LOGGER.info("Current time progress[{}].", progress); + long runningTime = System.currentTimeMillis() - taskInDb.getRunningTime(); + LOGGER.info("Current task running time [{}] minutes.", runningTime / (60 * 1000)); + if (progress.equals(jobChecker.getOldProgress())) { + long diff = System.currentTimeMillis() - taskInDb.getNewProgressTime(); + long diffMinutes = diff; + LOGGER.info("Time in same progress[{}]: {} minutes. Config max time: {} minutes.", progress, diffMinutes / (60 * 1000) + , linkisConfig.getKillStuckTasksTime().longValue() / (60 * 1000)); + if (diffMinutes > linkisConfig.getKillStuckTasksTime().longValue()) { + killTimeoutTask(applicationDao.findById(jobChecker.getApplicationId()),taskInDb, jobChecker); + } + } else { + LOGGER.info("Progress is updating , so is task new progress."); + taskInDb.setNewProgressTime(System.currentTimeMillis()); + taskInDb.setProgress(progress); + if (runningTime > linkisConfig.getKillStuckTasksTime().longValue()) { + killTimeoutTask(applicationDao.findById(jobChecker.getApplicationId()), taskInDb, jobChecker); + } + } + + taskDao.save(taskInDb); + } } catch (TaskNotExistException e) { LOGGER.error("Spark Task [{}] does not exist, application id : [{}]", jobChecker.getTaskId(), jobChecker.getApplicationId(), e); jobChecker.getTask().setStatus(TaskStatusEnum.TASK_NOT_EXIST.getCode()); taskDao.save(jobChecker.getTask()); jobChecker.getTask().getApplication().addAbnormalTaskNum(); applicationDao.saveApplication(jobChecker.getTask().getApplication()); + } catch (Exception e) { + LOGGER.error("Check task id:[{}] failed, application id:[{}]", jobChecker.getTaskId(), jobChecker.getApplicationId(), e); } } + private void killTimeoutTask(Application applicationInDb, Task taskInDb, JobChecker jobChecker) { + LOGGER.warn("Start to kill timeout task. Task remote ID:[{}]", taskInDb.getTaskRemoteId()); + // Kill timeout task. 
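+        // Best-effort kill: each failure below is only logged ("try to kill again"), so no retry
+        // happens here; the task is still not finished, and the next monitor cycle re-checks it
+        // and attempts the kill again.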
+ try { + executionManager.killApplication(applicationInDb, jobChecker.getUsername()); + } catch (JobKillException e) { + LOGGER.error("Kill timeout task failed. Qualitis try to kill again. Exception: {}", e.getMessage(), e); + } catch (UnExpectedRequestException e) { + LOGGER.error("Kill timeout task failed. Qualitis try to kill again. Exception: {}", e.getMessage(), e); + } catch (ResourceAccessException e) { + LOGGER.error("Kill timeout task failed. Qualitis try to kill again.Exception: {}", e.getMessage(), e); + } catch (ClusterInfoNotConfigException e) { + LOGGER.error("Kill timeout task failed. Qualitis try to kill again.Exception: {}", e.getMessage(), e); + } + LOGGER.warn("Finish to kill timeout task. Task remote ID:[{}]", taskInDb.getTaskRemoteId()); + } + @Override - public void checkIfLastJob(String applicationId, boolean finish, boolean isPass, boolean isNotExist) { - Application applicationInDb = applicationDao.findById(applicationId); + public void checkIfLastJob(Application applicationInDb, boolean finish, boolean isPass, boolean isNotExist) { if (finish) { if (isPass) { applicationInDb.addSuccessJobNum(); @@ -100,25 +222,30 @@ public void checkIfLastJob(String applicationId, boolean finish, boolean isPass, applicationInDb.addNotPassTaskNum(); LOGGER.info("Application add not pass task, application: {}", applicationInDb); } - } else if (!isNotExist){ + } else if (! isNotExist) { applicationInDb.addFailJobNum(); LOGGER.info("Application add failed task, application: {}", applicationInDb); + } else if (isNotExist) { + applicationInDb.addAbnormalTaskNum(); + LOGGER.info("Application add abnormal task, application: {}", applicationInDb); } - ifLastTaskAndSaveApplication(applicationInDb); } - private void writeDb(JobChecker jobChecker, String newStatus) { - Task taskInDb = taskDao.findByRemoteTaskId(jobChecker.getTaskId()); + private void writeDb(JobChecker jobChecker, String newStatus, Integer errCode) { + Task taskInDb = taskDao.findByRemoteTaskIdAndClusterName(jobChecker.getTaskId(), jobChecker.getClusterName()); + Application applicationInDb = applicationDao.findById(jobChecker.getApplicationId()); if (newStatus.equals(TaskStatusEnum.FAILED.getState())) { /* * 1.Modify end time of job * 2.Modify task finish time and failed num if last job * */ taskInDb.setEndTime(new DateTime(new Date()).toString(PRINT_TIME_FORMAT)); + taskInDb.setTaskComment(errCode == null ? ApplicationCommentEnum.UNKNOWN_ERROR_ISSUES.getCode() : ERR_CODE_TYPE.get(errCode)); modifyJobStatus(taskInDb, newStatus); taskDao.save(taskInDb); - checkIfLastJob(jobChecker.getApplicationId(), false, false, false); + applicationInDb.setApplicationComment(errCode == null ? 
ApplicationCommentEnum.UNKNOWN_ERROR_ISSUES.getCode() : ERR_CODE_TYPE.get(errCode)); + checkIfLastJob(applicationInDb, false, false, false); } else if (newStatus.equals(TaskStatusEnum.SUCCEED.getState())) { /* * 1.Modify end time of job @@ -126,15 +253,30 @@ private void writeDb(JobChecker jobChecker, String newStatus) { * */ taskInDb.setEndTime(new DateTime(new Date()).toString(PRINT_TIME_FORMAT)); boolean isPass; + boolean finish; if (passCheckOut(jobChecker.getApplicationId(), taskInDb)) { modifyJobStatus(taskInDb, TaskStatusEnum.PASS_CHECKOUT.getState()); isPass = true; + finish = true; } else { - modifyJobStatus(taskInDb, TaskStatusEnum.FAIL_CHECKOUT.getState()); + if (taskInDb.getAbortOnFailure() != null && taskInDb.getAbortOnFailure()) { + modifyJobStatus(taskInDb, TaskStatusEnum.FAILED.getState()); + taskInDb.setTaskComment(ApplicationCommentEnum.DIFF_DATA_ISSUES.getCode()); + applicationInDb.setApplicationComment(ApplicationCommentEnum.DIFF_DATA_ISSUES.getCode()); + finish = false; + } else { + modifyJobStatus(taskInDb, TaskStatusEnum.FAIL_CHECKOUT.getState()); + finish = true; + } isPass = false; } taskDao.save(taskInDb); - checkIfLastJob(jobChecker.getApplicationId(), true, isPass, false); + checkIfLastJob(applicationInDb, finish, isPass, false); + } else if (newStatus.equals(TaskStatusEnum.CANCELLED.getState())) { + modifyJobStatus(taskInDb, newStatus); + taskDao.save(taskInDb); + applicationInDb.setApplicationComment(ApplicationCommentEnum.TIMEOUT_KILL.getCode()); + checkIfLastJob(applicationInDb, false, false, false); } else { modifyJobStatus(taskInDb, newStatus); taskDao.save(taskInDb); @@ -148,25 +290,36 @@ private void modifyJobStatus(Task task, String newStatus) { task.setStatus(TaskStatusEnum.INITED.getCode()); } else if (newStatus.equals(TaskStatusEnum.RUNNING.getState())) { task.setStatus(TaskStatusEnum.RUNNING.getCode()); + if (null == task.getRunningTime()) { + task.setRunningTime(System.currentTimeMillis()); + } task.getApplication().setStatus(ApplicationStatusEnum.RUNNING.getCode()); - applicationDao.saveApplication(task.getApplication()); + if (task.getNewProgressTime() == null) { + task.setNewProgressTime(System.currentTimeMillis()); + } + if (task.getApplication().getTotalTaskNum() != null) { + applicationDao.saveApplication(task.getApplication()); + } LOGGER.info("Succeed to set application status to [{}], application: {}", ApplicationStatusEnum.RUNNING.getState(), task.getApplication()); } else if (newStatus.equals(TaskStatusEnum.SUCCEED.getState())) { task.setStatus(TaskStatusEnum.SUCCEED.getCode()); + task.setProgress(Double.parseDouble("1")); } else if (newStatus.equals(TaskStatusEnum.FAILED.getState())) { task.setStatus(TaskStatusEnum.FAILED.getCode()); } else if (newStatus.equals(TaskStatusEnum.PASS_CHECKOUT.getState())) { task.setStatus(TaskStatusEnum.PASS_CHECKOUT.getCode()); + task.setProgress(Double.parseDouble("1")); + task.setTaskComment(ApplicationCommentEnum.SAME_ISSUES.getCode()); } else if (newStatus.equals(TaskStatusEnum.FAIL_CHECKOUT.getState())) { - if (task.isAbortOnFailure() != null && task.isAbortOnFailure()) { - task.setStatus(TaskStatusEnum.FAILED.getCode()); - } else { - task.setStatus(TaskStatusEnum.FAIL_CHECKOUT.getCode()); - } + task.setStatus(TaskStatusEnum.FAIL_CHECKOUT.getCode()); + task.setProgress(Double.parseDouble("1")); + task.setTaskComment(ApplicationCommentEnum.DIFF_DATA_ISSUES.getCode()); } else if (newStatus.equals(TaskStatusEnum.CANCELLED.getState())) { task.setStatus(TaskStatusEnum.CANCELLED.getCode()); + 
task.setTaskComment(ApplicationCommentEnum.TIMEOUT_KILL.getCode()); } else if (newStatus.equals(TaskStatusEnum.TIMEOUT.getState())) { task.setStatus(TaskStatusEnum.TIMEOUT.getCode()); + task.setTaskComment(ApplicationCommentEnum.TIMEOUT_KILL.getCode()); } else if (newStatus.equals(TaskStatusEnum.SCHEDULED.getState())) { task.setStatus(TaskStatusEnum.SCHEDULED.getCode()); } else { @@ -178,12 +331,12 @@ private void modifyJobStatus(Task task, String newStatus) { private Boolean passCheckOut(String applicationId, Task task) { Boolean passFlag = true; for (TaskRuleSimple taskRuleSimple : task.getTaskRuleSimples()) { - if (!checkTaskRuleSimplePass(applicationId, taskRuleSimple)) { + if (! checkTaskRuleSimplePass(applicationId, taskRuleSimple)) { passFlag = false; } if (taskRuleSimple.getChildRuleSimple() != null) { - if (!checkTaskRuleSimplePass(applicationId, taskRuleSimple.getChildRuleSimple())) { + if (! checkTaskRuleSimplePass(applicationId, taskRuleSimple.getChildRuleSimple())) { passFlag = false; } } @@ -192,37 +345,67 @@ private Boolean passCheckOut(String applicationId, Task task) { return passFlag; } + @Transactional(rollbackFor = {RuntimeException.class, UnExpectedRequestException.class}) private Boolean checkTaskRuleSimplePass(String applicationId, TaskRuleSimple taskRuleSimple) { Boolean passFlag = true; - TaskResult taskResult = taskResultDao.findByApplicationIdAndRuleId(applicationId, taskRuleSimple.getRuleId()); - for (TaskRuleAlarmConfig taskRuleAlarmConfig : taskRuleSimple.getTaskRuleAlarmConfigList()) { - if (PassUtil.notSafe(applicationId, taskRuleSimple.getRuleId(), taskRuleAlarmConfig, taskResult, taskResultDao)) { - taskRuleAlarmConfig.setStatus(AlarmConfigStatusEnum.PASS.getCode()); - } else { - passFlag = false; - taskRuleAlarmConfig.setStatus(AlarmConfigStatusEnum.NOT_PASS.getCode()); + List taskResults = taskResultDao.findByApplicationAndRule(applicationId, taskRuleSimple.getRuleId()); + for (TaskResult taskResult : taskResults) { + List taskRuleAlarmConfigList = taskRuleSimple.getTaskRuleAlarmConfigList(); + Long ruleMetricId = taskResult.getRuleMetricId(); + if (ruleMetricId != null && ruleMetricId.longValue() != -1) { + taskRuleAlarmConfigList = taskRuleAlarmConfigList.stream().filter(taskRuleAlarmConfig -> + taskRuleAlarmConfig.getRuleMetric().getId().equals(ruleMetricId) + ).collect(Collectors.toList()); + } + for (TaskRuleAlarmConfig taskRuleAlarmConfig : taskRuleAlarmConfigList) { + if (PassUtil.notSafe(applicationId, taskRuleSimple.getRuleId(), taskRuleAlarmConfig, taskResult, taskResultDao)) { + taskRuleAlarmConfig.setStatus(AlarmConfigStatusEnum.PASS.getCode()); + } else { + passFlag = false; + taskRuleAlarmConfig.setStatus(AlarmConfigStatusEnum.NOT_PASS.getCode()); + + if (taskRuleSimple.getRuleType().equals(RuleTemplateTypeEnum.CUSTOM.getCode())) { + if (taskRuleAlarmConfig.getDeleteFailCheckResult() != null && true == taskRuleAlarmConfig.getDeleteFailCheckResult().booleanValue()) { + taskResult.setSaveResult(false); + taskResultDao.saveTaskResult(taskResult); + } + } else { + if (taskRuleSimple.getDeleteFailCheckResult() != null && true == taskRuleSimple.getDeleteFailCheckResult().booleanValue()) { + taskResult.setSaveResult(false); + taskResultDao.saveTaskResult(taskResult); + } + } + + } } } + return passFlag; } private void ifLastTaskAndSaveApplication(Application applicationInDb) { if (isLastJob(applicationInDb)) { - LOGGER.info("Succeed to execute all task of application. 
application: {}", applicationInDb); + LOGGER.info("Succeed to execute all task of application. Application: {}", applicationInDb); applicationInDb.setFinishTime(new DateTime(new Date()).toString(PRINT_TIME_FORMAT)); if (applicationInDb.getFinishTaskNum().equals(applicationInDb.getTotalTaskNum())) { applicationInDb.setStatus(ApplicationStatusEnum.FINISHED.getCode()); + applicationInDb.setApplicationComment(ApplicationCommentEnum.SAME_ISSUES.getCode()); } else if (!applicationInDb.getFailTaskNum().equals(0) || !applicationInDb.getAbnormalTaskNum().equals(0)){ applicationInDb.setStatus(ApplicationStatusEnum.FAILED.getCode()); } else { applicationInDb.setStatus(ApplicationStatusEnum.NOT_PASS.getCode()); + applicationInDb.setApplicationComment(ApplicationCommentEnum.DIFF_DATA_ISSUES.getCode()); } } - applicationDao.saveApplication(applicationInDb); - LOGGER.info("Succeed to save application. application: {}", applicationInDb); + if (applicationInDb.getTotalTaskNum() != null) { + applicationDao.saveApplication(applicationInDb); + } + LOGGER.info("Succeed to save application. Application: {}", applicationInDb); } private boolean isLastJob(Application application) { + LOGGER.info("Calculate application num: application.getFinishTaskNum ADD application.getFailTaskNum ADD application.getNotPassTaskNum ADD application.getAbnormalTaskNum = {} ADD {} ADD {} ADD {} vs application.getTotalTaskNum == {}", + application.getFinishTaskNum(), application.getFailTaskNum(), application.getNotPassTaskNum(), application.getAbnormalTaskNum(), application.getTotalTaskNum()); if (application.getTotalTaskNum() == null) { return false; } diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/UpdaterRunnable.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/UpdaterRunnable.java new file mode 100644 index 00000000..13c4ab98 --- /dev/null +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/UpdaterRunnable.java @@ -0,0 +1,62 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.webank.wedatasphere.qualitis.timer;
+
+import com.webank.wedatasphere.qualitis.bean.JobChecker;
+import com.webank.wedatasphere.qualitis.dao.ApplicationDao;
+import com.webank.wedatasphere.qualitis.dao.TaskDao;
+import com.webank.wedatasphere.qualitis.ha.AbstractServiceCoordinator;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author allenzhou
+ */
+public class UpdaterRunnable implements Runnable {
+    private IChecker iChecker;
+    private CountDownLatch latch;
+    private List<JobChecker> jobCheckers;
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(UpdaterRunnable.class);
+
+    public UpdaterRunnable(IChecker iChecker, List<JobChecker> jobCheckers, CountDownLatch latch) {
+        this.iChecker = iChecker;
+        this.jobCheckers = jobCheckers;
+        this.latch = latch;
+    }
+
+    @Override
+    public void run() {
+        try {
+            LOGGER.info(Thread.currentThread().getName() + " start to update applications.");
+            for (JobChecker jobChecker : this.jobCheckers) {
+                try {
+                    iChecker.checkTaskStatus(jobChecker);
+                } catch (Exception t) {
+                    LOGGER.error("Failed to check task status, application ID: {}, task ID: {}", jobChecker.getApplicationId(), jobChecker.getTaskId(), t);
+                }
+            }
+        } catch (Exception e) {
+            LOGGER.error("Failed to update applications, caused by: {}", e.getMessage(), e);
+        } finally {
+            latch.countDown();
+        }
+    }
+
+}
diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/UpdaterThreadFactory.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/UpdaterThreadFactory.java
new file mode 100644
index 00000000..2764749d
--- /dev/null
+++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/UpdaterThreadFactory.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.webank.wedatasphere.qualitis.timer; + +import java.util.Random; +import java.util.concurrent.ThreadFactory; + +/** + * @author howeye + */ +public class UpdaterThreadFactory implements ThreadFactory { + @Override + public Thread newThread(Runnable r) { + return new Thread(r); + } +} diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/PassUtil.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/PassUtil.java index ab589b8f..4b122c4e 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/PassUtil.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/PassUtil.java @@ -23,6 +23,13 @@ import com.webank.wedatasphere.qualitis.submitter.impl.ExecutionManagerImpl; import com.webank.wedatasphere.qualitis.entity.TaskRuleAlarmConfig; +import java.time.DayOfWeek; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.Month; +import java.time.ZoneId; +import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -44,10 +51,18 @@ public static Boolean notSafe(String taskId, Long ruleId, TaskRuleAlarmConfig al Integer checkTemplate = alarmConfig.getCheckTemplate(); Double thresholds = alarmConfig.getThreshold(); if (taskResult == null) { - LOGGER.error("Failed to find task result, task_id: {}, rule_id: {}", taskId, ruleId); + LOGGER.error("Failed to find task result, task ID: {}, rule ID: {}", taskId, ruleId); return false; } - Double result = taskResult.getValue(); + Double result = 0.0; + try { + if (StringUtils.isNotBlank(taskResult.getValue())) { + result = Double.parseDouble(taskResult.getValue()); + } + } catch (NumberFormatException e) { + return false; + } + if (checkTemplate.equals(CheckTemplateEnum.MONTH_FLUCTUATION.getCode())) { Double monthAvg = getMonthAvg(taskResultDao, taskResult.getRuleId()); return moreThanThresholds(result, monthAvg, thresholds); @@ -60,23 +75,31 @@ public static Boolean notSafe(String taskId, Long ruleId, TaskRuleAlarmConfig al } else if (checkTemplate.equals(CheckTemplateEnum.FIXED_VALUE.getCode())) { Integer compareType = alarmConfig.getCompareType(); return moreThanThresholds(result, thresholds, compareType); - } - LOGGER.error("CheckTemplate of alarmConfig is not supported, task_rule_alarmConfig id : {}", alarmConfig.getId()); - return false; - } + } else { + // Ring growth. 
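+            // All remaining check templates are ring-growth (period-over-period) comparisons:
+            // the configured threshold is a percentage, so it is divided by 100 before comparing,
+            // and if the previous period has no data the ratio cannot be computed and false is
+            // returned, which the caller records as a not-passed alarm.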
+ Integer compareType = alarmConfig.getCompareType(); + thresholds /= 100; + try { + result = getRingGrowth(taskResultDao, checkTemplate, ruleId, taskResult.getRuleMetricId()); + } catch (Exception e) { + LOGGER.info("Because the data of the previous period does not exist, the chain ratio cannot be calculated."); + return false; + } - private static Double getWeekAvg(TaskResultDao taskResultDao, Long ruleId) { - return getAvg(taskResultDao, Calendar.WEEK_OF_MONTH, ruleId); + return moreThanThresholds(result, thresholds, compareType); + } } private static Double getMonthAvg(TaskResultDao taskResultDao, Long ruleId) { return getAvg(taskResultDao, Calendar.MONTH, ruleId); } - private static Double getDayAvg(TaskResultDao taskResultDao, Long ruleId) { - return getAvg(taskResultDao, Calendar.DAY_OF_MONTH, ruleId); + private static Double getWeekAvg(TaskResultDao taskResultDao, Long ruleId) { + return getAvg(taskResultDao, Calendar.WEEK_OF_MONTH, ruleId); } + private static Double getDayAvg(TaskResultDao taskResultDao, Long ruleId) { return getAvg(taskResultDao, Calendar.DAY_OF_MONTH, ruleId); } + private static Double getAvg(TaskResultDao taskResultDao, Integer calendarStepUnit, Long ruleId) { Date nowDate = new Date(); Calendar calendar = Calendar.getInstance(); @@ -84,7 +107,127 @@ private static Double getAvg(TaskResultDao taskResultDao, Integer calendarStepUn calendar.add(calendarStepUnit, -1); Date lastMonthDate = calendar.getTime(); - return taskResultDao.findAvgByCreateTimeBetweenAndRuleId(ExecutionManagerImpl.PRINT_TIME_FORMAT.format(lastMonthDate), ExecutionManagerImpl.PRINT_TIME_FORMAT.format(nowDate), ruleId); + return taskResultDao.findAvgByCreateTimeBetweenAndRule(ExecutionManagerImpl.PRINT_TIME_FORMAT.format(lastMonthDate), ExecutionManagerImpl.PRINT_TIME_FORMAT.format(nowDate), ruleId); + } + + private static Double getRingGrowth(TaskResultDao taskResultDao, Integer ringType, Long ruleId, Long ruleMetricId) { + LocalDateTime localDateTime = LocalDateTime.now(); + LocalDateTime start; + LocalDateTime end; + LocalDateTime startOfLast; + LocalDateTime endOfLast; + if (ringType.equals(CheckTemplateEnum.FULL_YEAR_RING_GROWTH.getCode())) { + int year = localDateTime.getYear(); + // Location current time area and calculate avg. + start = LocalDateTime.of(year, 1, 1, 0, 0, 0); + end = LocalDateTime.of(year, 12, 31, 23, 59, 59); + startOfLast = LocalDateTime.of(year - 1, 1, 1, 0, 0, 0); + return specialTimeRingGrowth(start, end, startOfLast, taskResultDao, ruleId, ruleMetricId); + + } else if (ringType.equals(CheckTemplateEnum.HALF_YEAR_GROWTH.getCode())) { + int year = localDateTime.getYear(); + Month month = localDateTime.getMonth(); + // Location current time area and calculate avg. 
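+            // Choose the half-year window that covers the current month and compare it with the
+            // preceding half-year window; startOfLast marks where that earlier window begins.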
+            if (month.getValue() >= Month.JULY.getValue() && month.getValue() <= Month.DECEMBER.getValue()) {
+                start = LocalDateTime.of(year, 7, 1, 0, 0, 0);
+                end = LocalDateTime.of(year, 12, 31, 23, 59, 59);
+                startOfLast = LocalDateTime.of(year, 1, 1, 0, 0, 0);
+            } else {
+                start = LocalDateTime.of(year, 1, 1, 0, 0, 0);
+                end = LocalDateTime.of(year, 6, 30, 23, 59, 59);
+                startOfLast = LocalDateTime.of(year - 1, 7, 1, 0, 0, 0);
+            }
+            return specialTimeRingGrowth(start, end, startOfLast, taskResultDao, ruleId, ruleMetricId);
+        } else if (ringType.equals(CheckTemplateEnum.SEASON_RING_GROWTH.getCode())) {
+            int year = localDateTime.getYear();
+            Month month = localDateTime.getMonth();
+            Month firstMonthOfQuarter = month.firstMonthOfQuarter();
+            Month endMonthOfQuarter = Month.of(firstMonthOfQuarter.getValue() + 2);
+            if (month.getValue() >= Month.MARCH.getValue()) {
+                start = LocalDateTime.of(year, firstMonthOfQuarter.getValue(), 1, 0, 0, 0);
+                end = LocalDateTime.of(year, endMonthOfQuarter.getValue(), endMonthOfQuarter.maxLength(), 23, 59, 59);
+                startOfLast = LocalDateTime.of(year, 1, 1, 0, 0, 0);
+                return specialTimeRingGrowth(start, end, startOfLast, taskResultDao, ruleId, ruleMetricId);
+            } else {
+                start = LocalDateTime.of(year, firstMonthOfQuarter.getValue(), 1, 0, 0, 0);
+                end = LocalDateTime.of(year, endMonthOfQuarter.getValue(), endMonthOfQuarter.maxLength(), 23, 59, 59);
+                startOfLast = LocalDateTime.of(year - 1, Month.OCTOBER.getValue(), 1, 0, 0, 0);
+                return specialTimeRingGrowth(start, end, startOfLast, taskResultDao, ruleId, ruleMetricId);
+            }
+        } else if (ringType.equals(CheckTemplateEnum.MONTH_RING_GROWTH.getCode())) {
+            int year = localDateTime.getYear();
+            Month month = localDateTime.getMonth();
+            start = LocalDateTime.of(year, month.getValue(), 1, 0, 0, 0);
+            end = LocalDateTime.of(year, month.getValue(), month.maxLength(), 23, 59, 59);
+            startOfLast = LocalDateTime.of(year, 1, 1, 0, 0, 0);
+            return specialTimeRingGrowth(start, end, startOfLast, taskResultDao, ruleId, ruleMetricId);
+        } else if (ringType.equals(CheckTemplateEnum.WEEK_RING_GROWTH.getCode())) {
+            DayOfWeek dayOfWeek = localDateTime.getDayOfWeek();
+            int week = dayOfWeek.getValue();
+            start = LocalDateTime.of(LocalDate.now(), LocalTime.MIN).minusDays(week);
+            end = LocalDateTime.of(LocalDate.now(), LocalTime.MAX).plusDays(DayOfWeek.SUNDAY.getValue() - week);
+            startOfLast = start.minusWeeks(DayOfWeek.MONDAY.getValue());
+            return specialTimeRingGrowth(start, end, startOfLast, taskResultDao, ruleId, ruleMetricId);
+        } else if (ringType.equals(CheckTemplateEnum.DAY_RING_GROWTH.getCode())) {
+            start = LocalDateTime.of(LocalDate.now(), LocalTime.MIN);
+            end = LocalDateTime.of(LocalDate.now(), LocalTime.MAX);
+            startOfLast = start.minusDays(1);
+            return specialTimeRingGrowth(start, end, startOfLast, taskResultDao, ruleId, ruleMetricId);
+        } else if (ringType.equals(CheckTemplateEnum.HOUR_RING_GROWTH.getCode())) {
+            start = LocalDateTime.of(localDateTime.getYear(), localDateTime.getMonthValue(), localDateTime.getDayOfMonth(), localDateTime.getHour(), 0, 0);
+            end = start.plusHours(1);
+            startOfLast = start.minusHours(1);
+            return specialTimeRingGrowth(start, end, startOfLast, taskResultDao, ruleId, ruleMetricId);
+        } else if (ringType.equals(CheckTemplateEnum.YEAR_ON_YEAR.getCode())) {
+            return yearOnYear(localDateTime, taskResultDao, ruleId, ruleMetricId);
+        } else {
+            return -1.0;
+        }
+    }
+
+    private static Double yearOnYear(LocalDateTime localDateTime, TaskResultDao taskResultDao, Long ruleId, Long ruleMetricId) {
+        int year = localDateTime.getYear();
+        Month month = localDateTime.getMonth();
+
+        LocalDateTime start = LocalDateTime.of(year, month, 1, 0, 0, 0);
+        LocalDateTime end = LocalDateTime.of(year, month.getValue(), month.maxLength(), 23, 59, 59);
+        LocalDateTime startOfLast = LocalDateTime.of(year - 1, month, 1, 0, 0, 0);
+        LocalDateTime endOfLast = LocalDateTime.of(year - 1, month.getValue(), month.maxLength(), 23, 59, 59);
+
+        Date startDate = Date.from(start.atZone(ZoneId.systemDefault()).toInstant());
+        Date endDate = Date.from(end.atZone(ZoneId.systemDefault()).toInstant());
+        Date startOfLastDate = Date.from(startOfLast.atZone(ZoneId.systemDefault()).toInstant());
+        Date endOfLastDate = Date.from(endOfLast.atZone(ZoneId.systemDefault()).toInstant());
+
+        Double avg = taskResultDao.findAvgByCreateTimeBetweenAndRuleAndRuleMetric(ExecutionManagerImpl.PRINT_TIME_FORMAT.format(startDate)
+            , ExecutionManagerImpl.PRINT_TIME_FORMAT.format(endDate), ruleId, ruleMetricId);
+        // Calculate the average of the previous period.
+        Double avgOfLast = taskResultDao.findAvgByCreateTimeBetweenAndRuleAndRuleMetric(ExecutionManagerImpl.PRINT_TIME_FORMAT.format(startOfLastDate)
+            , ExecutionManagerImpl.PRINT_TIME_FORMAT.format(endOfLastDate), ruleId, ruleMetricId);
+        // Growth rate.
+        LOGGER.info("Finished calculating year-on-year growth.");
+        return (avg - avgOfLast) / avgOfLast;
+    }
+
+    private static Double specialTimeRingGrowth(LocalDateTime start, LocalDateTime end, LocalDateTime startOfLast, TaskResultDao taskResultDao, Long ruleId
+        , Long ruleMetricId) {
+        Date startDate;
+        Date endDate;
+        Date startOfLastDate;
+
+        LOGGER.info("Start to calculate ring growth.");
+        startDate = Date.from(start.atZone(ZoneId.systemDefault()).toInstant());
+        endDate = Date.from(end.atZone(ZoneId.systemDefault()).toInstant());
+        startOfLastDate = Date.from(startOfLast.atZone(ZoneId.systemDefault()).toInstant());
+        Double avgOfYear = taskResultDao.findAvgByCreateTimeBetweenAndRuleAndRuleMetric(ExecutionManagerImpl.PRINT_TIME_FORMAT.format(startDate)
+            , ExecutionManagerImpl.PRINT_TIME_FORMAT.format(endDate), ruleId, ruleMetricId);
+        // Calculate the average of the previous period.
+        Double avgOfLastYear = taskResultDao.findAvgByCreateTimeBetweenAndRuleAndRuleMetric(ExecutionManagerImpl.PRINT_TIME_FORMAT.format(startOfLastDate)
+            , ExecutionManagerImpl.PRINT_TIME_FORMAT.format(startDate), ruleId, ruleMetricId);
+        // Growth rate.
+        LOGGER.info("Finished calculating ring growth.");
+        return (avgOfYear - avgOfLastYear) / avgOfLastYear;
     }

     private static Boolean moreThanThresholds(Double taskResult, Double compareValue, Double percentage) {
@@ -122,7 +265,7 @@ private static Boolean moreThanThresholds(Double taskResult, Double compareValue
         } else if (compareType.equals(CompareTypeEnum.NOT_EQUAL.getCode())) {
             return !taskResult.equals(compareValue);
         }
-        LOGGER.warn("CompareType is not found, {}", compareType);
+        LOGGER.warn("Compare type is not found, {}", compareType);
         return null;
     }
diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/EventTypeEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/EventTypeEnum.java
new file mode 100644
index 00000000..4d35975b
--- /dev/null
+++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/EventTypeEnum.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.project.constant; + +/** + * @author allenzhou + */ +public enum EventTypeEnum { + /** + * Type of project + */ + MODIFY_PROJECT(1, "Modify Project"), + SUBMIT_PROJECT(2, "Submit Project"),; + + private Integer code; + private String message; + + EventTypeEnum(Integer code, String message) { + this.code = code; + this.message = message; + } + + public Integer getCode() { + return code; + } + + public void setCode(Integer code) { + this.code = code; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectOperateEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectOperateEnum.java new file mode 100644 index 00000000..31201716 --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectOperateEnum.java @@ -0,0 +1,66 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.qualitis.project.constant; + +/** + * @author allenzhou + */ +public enum ProjectOperateEnum { + /** + * Type of project + */ + ADD(1, "add", "添加"), + MODIFY(2, "modify", "编辑"), + EXECUTION(3, "execute", "执行"), + DOWNLOAD(4, "download", "下载"), + UPLOAD(5, "upload", "上传"), + ; + + private Integer code; + private String message; + private String zhMessage; + + ProjectOperateEnum(Integer code, String message, String zhMessage) { + this.code = code; + this.message = message; + this.zhMessage = zhMessage; + } + + public Integer getCode() { + return code; + } + + public void setCode(Integer code) { + this.code = code; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + public String getZhMessage() { + return zhMessage; + } + + public void setZhMessage(String zhMessage) { + this.zhMessage = zhMessage; + } +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectTypeEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectTypeEnum.java index 3dd374bd..c505dfbc 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectTypeEnum.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectTypeEnum.java @@ -19,7 +19,7 @@ /** * @author howeye */ -public enum ProjectTypeEnum { +public enum ProjectTypeEnum { /** * Type of project */ diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/Project.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/Project.java index bf3a0648..48d9d78e 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/Project.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/Project.java @@ -18,10 +18,7 @@ import com.fasterxml.jackson.annotation.JsonIdentityInfo; import com.fasterxml.jackson.annotation.ObjectIdGenerators; -import com.webank.wedatasphere.qualitis.rule.entity.Rule; -import java.util.ArrayList; -import java.util.List; import javax.persistence.*; import java.util.Set; @@ -38,18 +35,20 @@ public class Project { private Long id; @Column(length = 170) private String name; + @Column(name = "cn_name") + private String cnName; @Column(length = 1700) private String description; @OneToMany(mappedBy = "project", cascade = {CascadeType.REMOVE, CascadeType.PERSIST}) private Set projectUsers; + @OneToMany(mappedBy = "project", cascade = {CascadeType.REMOVE, CascadeType.PERSIST}) + private Set projectEvents; + @OneToMany(mappedBy = "project", cascade = {CascadeType.REMOVE, CascadeType.PERSIST}, fetch = FetchType.EAGER) private Set projectLabels; - @OneToMany(mappedBy = "project", cascade = CascadeType.REMOVE, fetch = FetchType.EAGER) - private Set rules; - @Column(name = "create_user", length = 50) private String createUser; /** @@ -76,16 +75,9 @@ public class Project { public Project() { } - public Project(String projectName, String description, String username, String chineseName, String department) { - this.name = projectName; - this.description = description; - this.createUser = username; - this.createUserFullName = username + "(" + chineseName + ")"; - this.userDepartment = department; - } - - public Project(String projectName, String description, String username, String chineseName, String department, String createTime) { + public Project(String projectName, String cnName, 
String description, String username, String chineseName, String department, String createTime) { this.name = projectName; + this.cnName = cnName; this.description = description; this.createUser = username; this.createUserFullName = username + "(" + chineseName + ")"; @@ -109,6 +101,14 @@ public void setName(String name) { this.name = name; } + public String getCnName() { + return cnName; + } + + public void setCnName(String cnName) { + this.cnName = cnName; + } + public String getDescription() { return description; } @@ -133,14 +133,6 @@ public void setProjectLabels(Set projectLabels) { this.projectLabels = projectLabels; } - public Set getRules() { - return rules; - } - - public void setRules(Set rules) { - this.rules = rules; - } - public String getCreateUser() { return createUser; } @@ -197,6 +189,14 @@ public void setModifyTime(String modifyTime) { this.modifyTime = modifyTime; } + public Set getProjectEvents() { + return projectEvents; + } + + public void setProjectEvents(Set projectEvents) { + this.projectEvents = projectEvents; + } + @Override public String toString() { return "Project{" + diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectEvent.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectEvent.java new file mode 100644 index 00000000..ecdf8d27 --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectEvent.java @@ -0,0 +1,174 @@ +package com.webank.wedatasphere.qualitis.project.entity; + +import java.util.Objects; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.ManyToOne; +import javax.persistence.Table; + +/** + * @author allenzhou@webank.com + * @date 2021/4/20 16:59 + */ +@Entity +@Table(name = "qualitis_project_event") +public class ProjectEvent { + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @ManyToOne + private Project project; + + @Column(name = "content", length = 500) + private String content; + @Column(name = "field", length = 50) + private String field; + @Column(name = "before_modify", length = 200) + private String beforeModify; + @Column(name = "after_modify", length = 200) + private String afterModify; + @Column(name = "modify_user", length = 50) + private String modifyUser; + @Column(name = "execute_user", length = 50) + private String executeUser; + + @Column(name = "time", length = 25) + private String time; + + @Column(name = "event_type") + private Integer eventType; + + public ProjectEvent() { + } + + public ProjectEvent(Project project, String content, String time) { + this.project = project; + this.content = content; + this.time = time; + } + + public ProjectEvent(Project project, String executeUser, String content, String time, Integer eventType) { + this.executeUser = executeUser; + this.eventType = eventType; + this.project = project; + this.content = content; + this.time = time; + } + + public ProjectEvent(Project projectInDb, String userName, String field, String beforeModify, String afterModify, String time, Integer eventType) { + this.beforeModify = beforeModify; + this.afterModify = afterModify; + this.eventType = eventType; + this.project = projectInDb; + this.modifyUser = userName; + this.field = field; + this.time = time; + + this.content = userName + " modified " + field; + } + + public Long getId() { + return id; + } + + 
public void setId(Long id) { + this.id = id; + } + + public Project getProject() { + return project; + } + + public void setProject(Project project) { + this.project = project; + } + + public String getContent() { + return content; + } + + public void setContent(String content) { + this.content = content; + } + + public String getField() { + return field; + } + + public void setField(String field) { + this.field = field; + } + + public String getBeforeModify() { + return beforeModify; + } + + public void setBeforeModify(String beforeModify) { + this.beforeModify = beforeModify; + } + + public String getAfterModify() { + return afterModify; + } + + public void setAfterModify(String afterModify) { + this.afterModify = afterModify; + } + + public String getModifyUser() { + return modifyUser; + } + + public void setModifyUser(String modifyUser) { + this.modifyUser = modifyUser; + } + + public String getExecuteUser() { + return executeUser; + } + + public void setExecuteUser(String executeUser) { + this.executeUser = executeUser; + } + + public String getTime() { + return time; + } + + public void setTime(String time) { + this.time = time; + } + + public Integer getEventType() { + return eventType; + } + + public void setEventType(Integer eventType) { + this.eventType = eventType; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ProjectEvent that = (ProjectEvent) o; + return Objects.equals(id, that.id) && + Objects.equals(project, that.project) && + Objects.equals(content, that.content) && + Objects.equals(time, that.time) && + Objects.equals(eventType, that.eventType); + } + + @Override + public int hashCode() { + return Objects.hash(id, project, content, time, eventType); + } +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectLabel.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectLabel.java index a4be5323..2da30b00 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectLabel.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectLabel.java @@ -82,12 +82,12 @@ public boolean equals(Object o) { if (this == o) {return true;} if (o == null || getClass() != o.getClass()) {return false;} ProjectLabel that = (ProjectLabel) o; - return Objects.equals(id, that.id); + return Objects.equals(labelName, that.labelName); } @Override public int hashCode() { - return Objects.hash(id); + return Objects.hash(labelName); } @Override diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectUser.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectUser.java index f017789a..22f307a6 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectUser.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectUser.java @@ -47,6 +47,12 @@ public class ProjectUser { public ProjectUser() { } + public ProjectUser(Integer permission, Project project, String userName) { + this.permission = permission; + this.project = project; + this.userName = userName; + } + public ProjectUser(Integer permission, Project project, String userName, String userFullName) { this.permission = permission; this.project = project; diff --git 
a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/CheckTemplateEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/CheckTemplateEnum.java index 949af71a..6dc92594 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/CheckTemplateEnum.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/CheckTemplateEnum.java @@ -26,12 +26,21 @@ public enum CheckTemplateEnum { /** * Monthly, weekly, day and fixed name + * Ring growth, year on year */ MONTH_FLUCTUATION(1,"月波动", "Month Fluctuation", Arrays.asList(Number.class)), WEEK_FLUCTUATION(2,"周波动", "Week Fluctuation", Arrays.asList(Number.class)), DAY_FLUCTUATION(3,"日波动", "Daily Fluctuation", Arrays.asList(Number.class)), FIXED_VALUE(4,"固定值", "Fix Value", Arrays.asList(Number.class)), - ; + FULL_YEAR_RING_GROWTH(5,"年环比", "Full Year Ring Growth", Arrays.asList(Number.class)), + HALF_YEAR_GROWTH(6,"半年环比", "Half Year Ring Growth", Arrays.asList(Number.class)), + SEASON_RING_GROWTH(7,"季环比", "Season Ring Growth", Arrays.asList(Number.class)), + MONTH_RING_GROWTH(8,"月环比", "Month Ring Growth", Arrays.asList(Number.class)), + WEEK_RING_GROWTH(9,"周环比", "Week Ring Growth", Arrays.asList(Number.class)), + DAY_RING_GROWTH(10,"日环比", "Day Ring Growth", Arrays.asList(Number.class)), + HOUR_RING_GROWTH(11,"时环比", "Hour Ring Growth", Arrays.asList(Number.class)), + YEAR_ON_YEAR(12,"月同比", "YEAR ON YEAR", Arrays.asList(Number.class)), + ; private Integer code; private String zhMessage; @@ -82,7 +91,7 @@ public static Integer getCheckTemplateCode(String checkTemplateName) { public static String getCheckTemplateName(Integer code, String local) { for (CheckTemplateEnum c : CheckTemplateEnum.values()) { if (c.getCode().equals(code)) { - if (local.equals("en_US")) { + if ("en_US".equals(local)) { return c.getEnMessage(); } else { return c.getZhMessage(); @@ -94,7 +103,7 @@ public static String getCheckTemplateName(Integer code, String local) { public static Integer getCheckTemplateCode(String checkTemplateName, String local) { for (CheckTemplateEnum c : CheckTemplateEnum.values()) { - if (local.equals("en_US")) { + if ("en_US".equals(local)) { if (c.getEnMessage().equals(checkTemplateName)) { return c.getCode(); } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/FileOutputNameEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/FileOutputNameEnum.java new file mode 100644 index 00000000..0b9a91e2 --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/FileOutputNameEnum.java @@ -0,0 +1,89 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.rule.constant; + +import java.util.Arrays; +import java.util.List; + +/** + * Enum in file output name of AlarmConfig + * @author howeye + */ +public enum FileOutputNameEnum { + /** + * file count, dir size. 
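+     * FILE_COUNT refers to the number of files; DIR_SIZE refers to the size of a directory.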
+ */ + FILE_COUNT(1,"文件数", "file count", Arrays.asList(Number.class)), + DIR_SIZE(2,"文件目录大小", "dir size", Arrays.asList(Number.class)) + ; + + private Integer code; + private String zhMessage; + private String enMessage; + private List classes; + + FileOutputNameEnum(Integer code, String zhMessage, String enMessage, List classes) { + this.code = code; + this.zhMessage = zhMessage; + this.enMessage = enMessage; + this.classes = classes; + } + + public Integer getCode() { + return code; + } + + public String getZhMessage() { + return zhMessage; + } + + public String getEnMessage() { + return enMessage; + } + + public List getClasses() { + return classes; + } + + public static String getFileOutputName(Integer code, String local) { + for (FileOutputNameEnum c : FileOutputNameEnum.values()) { + if (c.getCode().equals(code)) { + if (local != null && "en_US".equals(local)) { + return c.getEnMessage(); + } else { + return c.getZhMessage(); + } + } + } + return null; + } + + public static Integer getFileOutputNameCode(String checkTemplateName, String local) { + for (FileOutputNameEnum c : FileOutputNameEnum.values()) { + if (local != null && "en_US".equals(local)) { + if (c.getEnMessage().equals(checkTemplateName)) { + return c.getCode(); + } + } else { + if (c.getZhMessage().equals(checkTemplateName)) { + return c.getCode(); + } + } + } + return null; + } +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/FileOutputUnitEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/FileOutputUnitEnum.java new file mode 100644 index 00000000..75e36f53 --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/FileOutputUnitEnum.java @@ -0,0 +1,76 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.rule.constant; + +import java.util.Arrays; +import java.util.List; + +/** + * Enum in file output unit. 
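+ * Supported units: TB, GB, MB, KB and B.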
+ * @author howeye + */ +public enum FileOutputUnitEnum { + /** + * TB, GB, MB, KB + */ + TB(1,"TB", Arrays.asList(Number.class)), + GB(2,"GB", Arrays.asList(Number.class)), + MB(3,"MB", Arrays.asList(Number.class)), + KB(4,"KB", Arrays.asList(Number.class)), + B(5,"B", Arrays.asList(Number.class)) + ; + + private Integer code; + private String message; + private List classes; + + FileOutputUnitEnum(Integer code, String message, List classes) { + this.code = code; + this.message = message; + this.classes = classes; + } + + public Integer getCode() { + return code; + } + + public String getMessage() { + return message; + } + + public List getClasses() { + return classes; + } + + public static String fileOutputUnit(Integer code) { + for (FileOutputUnitEnum c : FileOutputUnitEnum.values()) { + if (c.getCode().equals(code)) { + return c.getMessage(); + } + } + return null; + } + + public static Integer fileOutputUnitCode(String message) { + for (FileOutputUnitEnum c : FileOutputUnitEnum.values()) { + if (c.getMessage().equals(message)) { + return c.getCode(); + } + } + return null; + } +} diff --git a/web/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateDataSourceTypeEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RoleDefaultTypeEnum.java similarity index 80% rename from web/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateDataSourceTypeEnum.java rename to core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RoleDefaultTypeEnum.java index 61b8ab58..a2f5a3c3 100644 --- a/web/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateDataSourceTypeEnum.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RoleDefaultTypeEnum.java @@ -17,21 +17,20 @@ package com.webank.wedatasphere.qualitis.rule.constant; /** - * @author howeye + * @author allenzhou */ -public enum TemplateDataSourceTypeEnum { +public enum RoleDefaultTypeEnum { /** - * hive, mysql, kafka + * type */ - HIVE(1, "hive"), - MYSQL(2, "mysql"), - KAFKA(3, "kafka"), - ; + ADMIN(1, "ADMIN"), + PROJECTOR(2, "PROJECTOR"), + DEPARTMENT_ADMIN(0, "DEPARTMEAT_ADMIN"); private Integer code; private String message; - TemplateDataSourceTypeEnum(Integer code, String message) { + RoleDefaultTypeEnum(Integer code, String message) { this.code = code; this.message = message; } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RuleTemplateLevelEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RuleTemplateLevelEnum.java new file mode 100644 index 00000000..6ce9ba80 --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RuleTemplateLevelEnum.java @@ -0,0 +1,45 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.qualitis.rule.constant; + +/** + * @author allenzhou + */ +public enum RuleTemplateLevelEnum { + /** + * level + */ + DEFAULT_TEMPLATE(1, "内置模版"), + DEPARTMENT_TEMPLATE(2, "部门模版"), + PERSONAL_TEMPLATE(3, "个人模版"); + + private Integer code; + private String message; + + RuleTemplateLevelEnum(Integer code, String message) { + this.code = code; + this.message = message; + } + + public Integer getCode() { + return code; + } + + public String getMessage() { + return message; + } +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RuleTemplateTypeEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RuleTemplateTypeEnum.java index 8da24182..42ccd320 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RuleTemplateTypeEnum.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RuleTemplateTypeEnum.java @@ -24,10 +24,12 @@ public enum RuleTemplateTypeEnum { * 1 Single Table verification template * 2 Custom template * 3 Multi-table verification template + * 4 File custom template */ SINGLE_SOURCE_TEMPLATE(1, "单表模版"), CUSTOM(2, "自定义模版"), - MULTI_SOURCE_TEMPLATE(3, "跨表模版"); + MULTI_SOURCE_TEMPLATE(3, "跨表模版"), + FILE_COUSTOM(4, "文件自定义模版"); private Integer code; private String message; diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateDataSourceTypeEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateDataSourceTypeEnum.java new file mode 100644 index 00000000..8c126da5 --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateDataSourceTypeEnum.java @@ -0,0 +1,65 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.qualitis.rule.constant; + +/** + * @author howeye + */ +public enum TemplateDataSourceTypeEnum { + /** + * hive, mysql, tdsql, kafka, fps + */ + HIVE(1, "hive"), + MYSQL(2, "mysql"), + TDSQL(3, "tdsql"), + KAFKA(4, "kafka"), + FPS(5, "fps"); + + private Integer code; + private String message; + + TemplateDataSourceTypeEnum(Integer code, String message) { + this.code = code; + this.message = message; + } + + public Integer getCode() { + return code; + } + + public String getMessage() { + return message; + } + + public static String getMessage(Integer code) { + for (TemplateDataSourceTypeEnum templateDataSourceTypeEnum : TemplateDataSourceTypeEnum.values()) { + if (templateDataSourceTypeEnum.getCode().equals(code)) { + return templateDataSourceTypeEnum.getMessage(); + } + } + return TemplateDataSourceTypeEnum.HIVE.getMessage(); + } + + public static Integer getCode(String message) { + for (TemplateDataSourceTypeEnum templateDataSourceTypeEnum : TemplateDataSourceTypeEnum.values()) { + if (templateDataSourceTypeEnum.getMessage().toLowerCase().equals(message)) { + return templateDataSourceTypeEnum.getCode(); + } + } + return TemplateDataSourceTypeEnum.HIVE.getCode(); + } +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateInputTypeEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateInputTypeEnum.java index 5d52d9c0..c9fcdde6 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateInputTypeEnum.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateInputTypeEnum.java @@ -47,8 +47,16 @@ public enum TemplateInputTypeEnum { SOURCE_FIELD(18, "join左字段"), TARGET_FIELD(19, "join右字段"), FRONT_CONDITION(20, "前置条件"), - BEHIND_CONDITION(21, "后置条件") + BEHIND_CONDITION(21, "后置条件"), + SOURCE_FIELDS(22, "来源字段"), + TARGET_FIELDS(23, "目标字段"), + + /** + * Provided for primary line repeat + */ + FIELD_REPLACE_NULL_CONCAT(24, "替换空字段拼接"), ; + private Integer code; private String message; diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDao.java index d276c0bb..61cf71e1 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDao.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDao.java @@ -28,6 +28,13 @@ */ public interface RuleDao { + /** + * Count by project + * @param project + * @return + */ + int countByProject(Project project); + /** * Find rule by project * @param project @@ -35,6 +42,24 @@ public interface RuleDao { */ List findByProject(Project project); + /** + * Find by project with page. + * @param project + * @param page + * @param size + * @return + */ + List findByProjectWithPage(Project project, int page, int size); + + /** + * Find by rule metric ID with page. 
+ * @param ruleMetricId + * @param page + * @param size + * @return + */ +// List findByRuleMetricWithPage(Long ruleMetricId, int page, int size); + /** * Find rule by project * @param project @@ -63,6 +88,12 @@ public interface RuleDao { */ void deleteRule(Rule rule); + /** + * Delete all rules + * @param rules + */ + void deleteAllRule(List rules); + /** * Find rules by ids * @param ruleIds diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDataSourceCountDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDataSourceCountDao.java new file mode 100644 index 00000000..a39a696b --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDataSourceCountDao.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.rule.dao; + +import com.webank.wedatasphere.qualitis.rule.entity.RuleDataSourceCount; + +/** + * @author allenzhou + */ +public interface RuleDataSourceCountDao { + + /** + * Find datasource count + * @param datasourceName + * @param userId + * @return + */ + Integer findCount(String datasourceName, Long userId); + + /** + * Save. + * @param ruleDataSourceCount + * @return + */ + RuleDataSourceCount save(RuleDataSourceCount ruleDataSourceCount); + +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDataSourceDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDataSourceDao.java index 00f65183..a76a85a4 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDataSourceDao.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDataSourceDao.java @@ -64,7 +64,7 @@ public interface RuleDataSourceDao { * @param user * @return */ - List> findProjectDsByUser(String user); + List> findProjectDsByUser(String user); /** * Paging query rule datasource @@ -73,7 +73,7 @@ public interface RuleDataSourceDao { * @param size * @return */ - List> findProjectDsByUser(String user, int page, int size); + List> findProjectDsByUser(String user, int page, int size); /** * Find rules related with cluster name, database name ,table name, column name. @@ -86,6 +86,30 @@ public interface RuleDataSourceDao { */ List findRuleByDataSource(String clusterName, String dbName, String tableName, String colName, String user); + /** + * Paging rules related with cluster name, database name ,table name, column name. + * @param clusterName + * @param dbName + * @param tableName + * @param colName + * @param user + * @param page + * @param size + * @return + */ + List findRuleByDataSource(String clusterName, String dbName, String tableName, String colName, String user, int page, int size); + + /** + * Count. 
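+     * Count rules related to the given cluster, database, table, column and user.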
+ * @param clusterName + * @param dbName + * @param tableName + * @param colName + * @param user + * @return + */ + int countRuleByDataSource(String clusterName, String dbName, String tableName, String colName, String user); + /** * Filter rule datasource * @param user @@ -94,7 +118,7 @@ public interface RuleDataSourceDao { * @param tableName * @return */ - List> filterProjectDsByUser(String user, String clusterName, String dbName, String tableName); + List> filterProjectDsByUser(String user, String clusterName, String dbName, String tableName); /** * Filter rule datasource pageable. @@ -106,5 +130,32 @@ public interface RuleDataSourceDao { * @param size * @return */ - List> filterProjectDsByUserPage(String user, String clusterName, String dbName, String tableName, int page, int size); + List> filterProjectDsByUserPage(String user, String clusterName, String dbName, String tableName, int page, int size); + + /** + * Save rule datasource + * @param newRuleDataSource + * @return + */ + RuleDataSource saveRuleDataSource(RuleDataSource newRuleDataSource); + + /** + * Find cols' name. + * @param user + * @param clusterName + * @param dbName + * @param tableName + * @return + */ + List findColsByUser(String user, String clusterName, String dbName, String tableName); + + /** + * Find all datasources by user. + * @param user + * @param clusterName + * @param dbName + * @param tableName + * @return + */ + List findDatasourcesByUser(String user, String clusterName, String dbName, String tableName); } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleGroupDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleGroupDao.java index b5e2f5cd..0d695263 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleGroupDao.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleGroupDao.java @@ -17,6 +17,7 @@ package com.webank.wedatasphere.qualitis.rule.dao; import com.webank.wedatasphere.qualitis.rule.entity.RuleGroup; +import java.util.List; /** * @author howeye @@ -52,4 +53,10 @@ public interface RuleGroupDao { */ RuleGroup findByRuleGroupNameAndProjectId(String ruleGroupName, Long projectId); + /** + * Find rule group list by project id + * @param projectId + * @return + */ + List findByProjectId(Long projectId); } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleTemplateDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleTemplateDao.java index d030e07a..e625b03d 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleTemplateDao.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleTemplateDao.java @@ -16,6 +16,8 @@ package com.webank.wedatasphere.qualitis.rule.dao; +import com.webank.wedatasphere.qualitis.entity.Department; +import com.webank.wedatasphere.qualitis.entity.User; import com.webank.wedatasphere.qualitis.rule.entity.Template; import java.util.List; @@ -39,6 +41,15 @@ public interface RuleTemplateDao { */ List