[improvement][headless] Clean up the code logic of the headless translator.

Author: jerryjzhang
Date: 2024-11-26 10:40:27 +08:00
parent 91b16f95ff
commit b84dde3799
17 changed files with 84 additions and 91 deletions

View File

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.core.pojo;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.RecordInfo;
+ import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.AESEncryptionUtil;
import lombok.AllArgsConstructor;
import lombok.Builder;
@@ -36,7 +37,7 @@ public class Database extends RecordInfo {
private String schema;
/** mysql,clickhouse */
- private String type;
+ private EngineType type;
private List<String> admins = Lists.newArrayList();
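The rest of the commit follows from this one field change: callers that previously re-parsed the raw type string now read the enum directly. A hedged before/after sketch of the caller pattern (variable names mirror the hunks below):

// Before: every call site re-parsed the raw string
EngineType parsed = EngineType.fromString(database.getType());
// After: Database stores the enum once, so call sites just read it
EngineType engineType = database.getType();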

View File

@@ -50,42 +50,40 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
}
} catch (Exception e) {
queryStatement.setErrMsg(e.getMessage());
log.error("Failed to translate semantic query [{}]", e);
log.error("Failed to translate semantic query [{}]", e.getMessage(), e);
}
}
private void doOntologyParse(QueryStatement queryStatement) throws Exception {
OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam();
- SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam();
log.info("parse with ontology: [{}]", ontologyQueryParam);
ComponentFactory.getQueryParser().parse(queryStatement);
- String ontologyQueryTable = sqlQueryParam.getTable();
- String ontologyQuerySql = sqlQueryParam.getSql();
- String ontologySql = queryStatement.getSql();
if (!queryStatement.isOk()) {
throw new Exception(String.format("parse ontology table [%s] error [%s]",
- ontologyQueryTable, queryStatement.getErrMsg()));
+ queryStatement.getSqlQueryParam().getTable(), queryStatement.getErrMsg()));
}
- List<Pair<String, String>> tables = new ArrayList<>();
+ SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam();
+ String ontologyQuerySql = sqlQueryParam.getSql();
+ String ontologyInnerTable = sqlQueryParam.getTable();
+ String ontologyInnerSql = queryStatement.getSql();
- tables.add(Pair.of(ontologyQueryTable, ontologySql));
+ List<Pair<String, String>> tables = new ArrayList<>();
+ tables.add(Pair.of(ontologyInnerTable, ontologyInnerSql));
if (sqlQueryParam.isSupportWith()) {
- EngineType engineType =
- EngineType.fromString(queryStatement.getOntology().getDatabase().getType());
+ EngineType engineType = queryStatement.getOntology().getDatabase().getType();
if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) {
String withSql = "with " + tables.stream()
.map(t -> String.format("%s as (%s)", t.getLeft(), t.getRight()))
.collect(Collectors.joining(",")) + "\n" + ontologyQuerySql;
queryStatement.setSql(withSql);
} else {
- List<String> parentTableList =
+ List<String> withTableList =
tables.stream().map(Pair::getLeft).collect(Collectors.toList());
- List<String> parentSqlList =
+ List<String> withSqlList =
tables.stream().map(Pair::getRight).collect(Collectors.toList());
String mergeSql = SqlMergeWithUtils.mergeWith(engineType, ontologyQuerySql,
- parentSqlList, parentTableList);
+ withSqlList, withTableList);
queryStatement.setSql(mergeSql);
}
} else {

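For context, the branch above wraps the inner ontology SQL in a common table expression when the outer SQL has no WITH clause of its own. A standalone sketch of that string composition, with illustrative table names and SQL (it mirrors the with-building code in this hunk):

import java.util.List;
import java.util.stream.Collectors;

public class WithWrapSketch {
    public static void main(String[] args) {
        // each entry pairs an inner table alias with the SQL that produces it
        List<String[]> tables = List.of(new String[] {"tbl_1", "select id, pv from src"});
        String ontologyQuerySql = "select id, sum(pv) from tbl_1 group by id";
        String withSql = "with " + tables.stream()
                .map(t -> String.format("%s as (%s)", t[0], t[1]))
                .collect(Collectors.joining(",")) + "\n" + ontologyQuerySql;
        // -> with tbl_1 as (select id, pv from src)
        //    select id, sum(pv) from tbl_1 group by id
        System.out.println(withSql);
    }
}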
View File

@@ -28,7 +28,8 @@ public class CalciteQueryParser implements QueryParser {
.enableOptimize(queryStatement.getEnableOptimize()).build())
.build();
SqlBuilder sqlBuilder = new SqlBuilder(semanticSchema);
- sqlBuilder.buildOntologySql(queryStatement);
+ String sql = sqlBuilder.buildOntologySql(queryStatement);
+ queryStatement.setSql(sql);
}
}

View File

@@ -33,7 +33,7 @@ public class SchemaBuilder {
Prepare.CatalogReader catalogReader = new CalciteCatalogReader(rootSchema,
Collections.singletonList(schema.getSchemaKey()), Configuration.typeFactory,
Configuration.config);
- EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+ EngineType engineType = schema.getOntology().getDatabase().getType();
S2SQLSqlValidatorImpl s2SQLSqlValidator =
new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader,
Configuration.typeFactory, Configuration.getValidatorConfig(engineType));

View File

@@ -35,7 +35,7 @@ public class SqlBuilder {
this.schema = schema;
}
- public void buildOntologySql(QueryStatement queryStatement) throws Exception {
+ public String buildOntologySql(QueryStatement queryStatement) throws Exception {
this.ontologyQueryParam = queryStatement.getOntologyQueryParam();
if (ontologyQueryParam.getMetrics() == null) {
ontologyQueryParam.setMetrics(new ArrayList<>());
@@ -50,14 +50,12 @@ public class SqlBuilder {
buildParseNode();
Database database = queryStatement.getOntology().getDatabase();
- EngineType engineType = EngineType.fromString(database.getType());
- optimizeParseNode(engineType);
- String sql = getSql(engineType);
- queryStatement.setSql(sql);
+ optimizeParseNode(database.getType());
+ return getSql(database.getType());
}
private void buildParseNode() throws Exception {
- // find the match Datasource
+ // find relevant data models
scope = SchemaBuilder.getScope(schema);
List<DataModel> dataModels =
DataModelNode.getRelatedDataModels(scope, schema, ontologyQueryParam);
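With buildOntologySql now returning the generated SQL instead of writing it into the statement, the caller decides where the result goes. A short usage sketch of the new contract (it mirrors the CalciteQueryParser and test hunks in this commit):

// caller-side pattern after this change
SqlBuilder sqlBuilder = new SqlBuilder(semanticSchema);
String sql = sqlBuilder.buildOntologySql(queryStatement);
queryStatement.setSql(sql);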

View File

@@ -168,7 +168,7 @@ public class DataModelNode extends SemanticNode {
public static void mergeQueryFilterDimensionMeasure(S2CalciteSchema schema,
OntologyQueryParam metricCommand, Set<String> queryDimension, List<String> measures,
SqlValidatorScope scope) throws Exception {
- EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+ EngineType engineType = schema.getOntology().getDatabase().getType();
if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) {
Set<String> filterConditions = new HashSet<>();
FilterNode.getFilterField(parse(metricCommand.getWhere(), scope, engineType),
@@ -229,8 +229,7 @@ public class DataModelNode extends SemanticNode {
}
filterMeasure.addAll(sourceMeasure);
filterMeasure.addAll(dimension);
- EngineType engineType =
- EngineType.fromString(schema.getOntology().getDatabase().getType());
+ EngineType engineType = schema.getOntology().getDatabase().getType();
mergeQueryFilterDimensionMeasure(schema, metricCommand, queryDimension, measures,
scope);
boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension,

View File

@@ -32,7 +32,7 @@ public class FilterRender extends Renderer {
SqlNode filterNode = null;
List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics());
List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions());
- EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+ EngineType engineType = schema.getOntology().getDatabase().getType();
if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType);

View File

@@ -50,7 +50,7 @@ public class JoinRender extends Renderer {
public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricCommand.getWhere();
- EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+ EngineType engineType = schema.getOntology().getDatabase().getType();
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
if (queryWhere != null && !queryWhere.isEmpty()) {
@@ -146,7 +146,7 @@ public class JoinRender extends Renderer {
Set<String> sourceMeasure, SqlValidatorScope scope, S2CalciteSchema schema,
boolean nonAgg) throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
- EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+ EngineType engineType = schema.getOntology().getDatabase().getType();
for (String m : reqMetrics) {
if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) {
MetricNode metricNode = buildMetricNode(m, dataModel, scope, schema, nonAgg, alias);
@@ -181,7 +181,7 @@ public class JoinRender extends Renderer {
Set<String> dimension, SqlValidatorScope scope, S2CalciteSchema schema)
throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
- EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+ EngineType engineType = schema.getOntology().getDatabase().getType();
for (String d : reqDimensions) {
if (getMatchDimension(schema, dimension, dataModel, d, queryDimension)) {
if (d.contains(Constants.DIMENSION_IDENTIFY)) {
@@ -261,7 +261,7 @@ public class JoinRender extends Renderer {
private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView,
Map<String, String> before, DataModel dataModel, S2CalciteSchema schema,
SqlValidatorScope scope) throws Exception {
- EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+ EngineType engineType = schema.getOntology().getDatabase().getType();
SqlNode condition =
getCondition(leftTable, tableView, dataModel, schema, scope, engineType);
SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
@@ -454,8 +454,7 @@ public class JoinRender extends Renderer {
endTime = zipper.getAlias() + "." + endTimeOp.get().getName();
dateTime = partMetric.getAlias() + "." + partTime.get().getName();
}
- EngineType engineType =
- EngineType.fromString(schema.getOntology().getDatabase().getType());
+ EngineType engineType = schema.getOntology().getDatabase().getType();
ArrayList<SqlNode> operandList =
new ArrayList<>(Arrays.asList(SemanticNode.parse(endTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType)));

View File

@@ -25,7 +25,7 @@ public class OutputRender extends Renderer {
public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
TableView selectDataSet = super.tableView;
- EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+ EngineType engineType = schema.getOntology().getDatabase().getType();
for (String dimension : metricCommand.getDimensions()) {
selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
}

View File

@@ -109,7 +109,7 @@ public class SourceRender extends Renderer {
S2CalciteSchema schema, boolean nonAgg, Map<String, String> extendFields,
TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception {
List<Dimension> dimensionList = schema.getDimensions().get(datasource.getName());
- EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+ EngineType engineType = schema.getOntology().getDatabase().getType();
boolean isAdd = false;
if (!CollectionUtils.isEmpty(dimensionList)) {
for (Dimension dim : dimensionList) {
@@ -187,7 +187,7 @@ public class SourceRender extends Renderer {
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
Iterator<String> iterator = fields.iterator();
List<SqlNode> whereNode = new ArrayList<>();
- EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+ EngineType engineType = schema.getOntology().getDatabase().getType();
while (iterator.hasNext()) {
String cur = iterator.next();
if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) {
@@ -341,7 +341,7 @@ public class SourceRender extends Renderer {
String queryWhere = ontologyQueryParam.getWhere();
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
- EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+ EngineType engineType = schema.getOntology().getDatabase().getType();
if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields);

View File

@@ -62,8 +62,7 @@ public class MetricRatioConverter implements QueryConverter {
@Override
public void convert(QueryStatement queryStatement) throws Exception {
Database database = queryStatement.getOntology().getDatabase();
- generateRatioSql(queryStatement, EngineType.fromString(database.getType().toUpperCase()),
- database.getVersion());
+ generateRatioSql(queryStatement, database.getType(), database.getVersion());
}
/** Ratio */

View File

@@ -34,31 +34,24 @@ public class SqlQueryConverter implements QueryConverter {
@Override
public boolean accept(QueryStatement queryStatement) {
- if (Objects.nonNull(queryStatement.getSqlQueryParam()) && queryStatement.getIsS2SQL()) {
- return true;
- }
- return false;
+ return Objects.nonNull(queryStatement.getSqlQueryParam()) && queryStatement.getIsS2SQL();
}
@Override
public void convert(QueryStatement queryStatement) throws Exception {
- SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
convertNameToBizName(queryStatement);
rewriteFunction(queryStatement);
- String reqSql = queryStatement.getSqlQueryParam().getSql();
- String tableName = SqlSelectHelper.getTableName(reqSql);
+ rewriteOrderBy(queryStatement);
+ // fill sqlQuery
+ SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
+ SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam();
+ String tableName = SqlSelectHelper.getTableName(sqlQueryParam.getSql());
if (StringUtils.isEmpty(tableName)) {
return;
}
- // replace order by field with the select sequence number
- queryStatement.setSql(SqlReplaceHelper.replaceAggAliasOrderbyField(reqSql));
- log.debug("replaceOrderAggSameAlias {} -> {}", reqSql, queryStatement.getSql());
- SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
- // fill dataSetQuery
- SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam();
sqlQueryParam.setTable(tableName.toLowerCase());
+ SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
if (!sqlGenerateUtils.isSupportWith(
EngineType.fromString(semanticSchemaResp.getDatabaseResp().getType().toUpperCase()),
semanticSchemaResp.getDatabaseResp().getVersion())) {
@@ -67,27 +60,26 @@ public class SqlQueryConverter implements QueryConverter {
}
// build ontologyQuery
- List<String> allFields = SqlSelectHelper.getAllSelectFields(queryStatement.getSql());
+ List<String> allFields = SqlSelectHelper.getAllSelectFields(sqlQueryParam.getSql());
List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields);
List<String> metrics =
metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
- AggOption aggOption = getAggOption(queryStatement, metricSchemas);
+ AggOption aggOption = getAggOption(sqlQueryParam.getSql(), metricSchemas);
Set<String> dimensions = getDimensions(semanticSchemaResp, allFields);
OntologyQueryParam ontologyQueryParam = new OntologyQueryParam();
ontologyQueryParam.getMetrics().addAll(metrics);
ontologyQueryParam.getDimensions().addAll(dimensions);
ontologyQueryParam.setAggOption(aggOption);
ontologyQueryParam.setNativeQuery(!AggOption.isAgg(aggOption));
log.info("parse sqlQuery [{}] ", sqlQueryParam);
queryStatement.setOntologyQueryParam(ontologyQueryParam);
- queryStatement.setSql(sqlQueryParam.getSql());
generateDerivedMetric(sqlGenerateUtils, queryStatement);
+ queryStatement.setSql(sqlQueryParam.getSql());
+ log.info("parse sqlQuery [{}] ", sqlQueryParam);
}
- private AggOption getAggOption(QueryStatement queryStatement,
- List<MetricSchemaResp> metricSchemas) {
- String sql = queryStatement.getSql();
+ private AggOption getAggOption(String sql, List<MetricSchemaResp> metricSchemas) {
if (SqlSelectFunctionHelper.hasAggregateFunction(sql)) {
return AggOption.AGGREGATION;
}
@@ -148,30 +140,31 @@ public class SqlQueryConverter implements QueryConverter {
private void generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils,
QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
- SqlQueryParam dsParam = queryStatement.getSqlQueryParam();
- OntologyQueryParam ontology = queryStatement.getOntologyQueryParam();
- String sql = dsParam.getSql();
+ SqlQueryParam sqlParam = queryStatement.getSqlQueryParam();
+ OntologyQueryParam ontologyParam = queryStatement.getOntologyQueryParam();
+ String sql = sqlParam.getSql();
Set<String> measures = new HashSet<>();
Map<String, String> replaces = generateDerivedMetric(sqlGenerateUtils, semanticSchemaResp,
- ontology.getAggOption(), ontology.getMetrics(), ontology.getDimensions(), measures);
+ ontologyParam.getAggOption(), ontologyParam.getMetrics(),
+ ontologyParam.getDimensions(), measures);
if (!CollectionUtils.isEmpty(replaces)) {
// metricTable sql use measures replace metric
sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces);
- ontology.setAggOption(AggOption.NATIVE);
+ ontologyParam.setAggOption(AggOption.NATIVE);
// metricTable use measures replace metric
if (!CollectionUtils.isEmpty(measures)) {
- ontology.getMetrics().addAll(measures);
+ ontologyParam.getMetrics().addAll(measures);
} else {
// empty measure , fill default
- ontology.setMetrics(new ArrayList<>());
- ontology.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
- getDefaultModel(semanticSchemaResp, ontology.getDimensions())));
+ ontologyParam.setMetrics(new ArrayList<>());
+ ontologyParam.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
+ getDefaultModel(semanticSchemaResp, ontologyParam.getDimensions())));
}
}
- dsParam.setSql(sql);
+ sqlParam.setSql(sql);
}
private Map<String, String> generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils,
@@ -245,6 +238,13 @@ public class SqlQueryConverter implements QueryConverter {
queryStatement.getSqlQueryParam().setSql(sql);
}
+ private void rewriteOrderBy(QueryStatement queryStatement) {
+ // replace order by field with the select sequence number
+ String sql = queryStatement.getSqlQueryParam().getSql();
+ String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql);
+ log.debug("replaceOrderAggSameAlias {} -> {}", sql, newSql);
+ queryStatement.getSqlQueryParam().setSql(newSql);
+ }
private void rewriteFunction(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
@@ -300,7 +300,7 @@ public class SqlQueryConverter implements QueryConverter {
}
return modelMatchCnt.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
- .map(m -> m.getKey()).findFirst().orElse("");
+ .map(Map.Entry::getKey).findFirst().orElse("");
}
return semanticSchemaResp.getModelResps().get(0).getBizName();
}
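The ORDER BY handling is now isolated in rewriteOrderBy above. A hedged sketch of what that step does; the input and the rewritten output are assumptions based on the helper's name and the original inline comment ("replace order by field with the select sequence number"), not on its actual implementation:

// illustrative only; the exact output of SqlReplaceHelper.replaceAggAliasOrderbyField may differ
String sql = "SELECT department, SUM(pv) AS pv FROM t_visit GROUP BY department ORDER BY pv DESC";
String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql);
// assumed result: ... GROUP BY department ORDER BY 2 DESC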

View File

@@ -19,10 +19,8 @@ public class SqlVariableConverter implements QueryConverter {
@Override
public boolean accept(QueryStatement queryStatement) {
- if (Objects.isNull(queryStatement.getStructQueryParam()) && queryStatement.getIsS2SQL()) {
- return false;
- }
- return true;
+ return Objects.nonNull(queryStatement.getStructQueryParam())
+ && !queryStatement.getIsS2SQL();
}
@Override

View File

@@ -1,7 +1,7 @@
package com.tencent.supersonic.headless.core.translator.converter;
+ import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.ColumnOrder;
- import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database;
@@ -22,11 +22,8 @@ public class StructQueryConverter implements QueryConverter {
@Override
public boolean accept(QueryStatement queryStatement) {
- if (Objects.nonNull(queryStatement.getStructQueryParam()) && !queryStatement.getIsS2SQL()) {
- return true;
- }
- return false;
+ return Objects.nonNull(queryStatement.getStructQueryParam())
+ && !queryStatement.getIsS2SQL();
}
@Override
@@ -43,8 +40,7 @@ public class StructQueryConverter implements QueryConverter {
sqlGenerateUtils.getOrderBy(structQueryParam),
sqlGenerateUtils.getLimit(structQueryParam));
Database database = queryStatement.getOntology().getDatabase();
- EngineType engineType = EngineType.fromString(database.getType().toUpperCase());
- if (!sqlGenerateUtils.isSupportWith(engineType, database.getVersion())) {
+ if (!sqlGenerateUtils.isSupportWith(database.getType(), database.getVersion())) {
sqlParam.setSupportWith(false);
sql = String.format("select %s from %s t0 %s %s %s",
sqlGenerateUtils.getSelect(structQueryParam), dsTable,
@@ -58,7 +54,7 @@ public class StructQueryConverter implements QueryConverter {
OntologyQueryParam ontologyQueryParam = new OntologyQueryParam();
ontologyQueryParam.getDimensions().addAll(structQueryParam.getGroups());
ontologyQueryParam.getMetrics().addAll(structQueryParam.getAggregators().stream()
- .map(a -> a.getColumn()).collect(Collectors.toList()));
+ .map(Aggregator::getColumn).collect(Collectors.toList()));
String where = sqlGenerateUtils.generateWhere(structQueryParam, null);
ontologyQueryParam.setWhere(where);
ontologyQueryParam.setAggOption(AggOption.AGGREGATION);

View File

@@ -3,6 +3,7 @@ package com.tencent.supersonic.headless.core.utils;
import javax.sql.DataSource;
import com.tencent.supersonic.common.pojo.QueryColumn;
+ import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.DateUtils;
import com.tencent.supersonic.headless.api.pojo.enums.DataType;
import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
@@ -64,7 +65,7 @@ public class SqlUtils {
public SqlUtils init(Database database) {
return SqlUtilsBuilder.getBuilder()
.withName(database.getId() + AT_SYMBOL + database.getName())
- .withType(database.getType()).withJdbcUrl(database.getUrl())
+ .withType(database.getType().getName()).withJdbcUrl(database.getUrl())
.withUsername(database.getUsername()).withPassword(database.getPassword())
.withJdbcDataSource(this.jdbcDataSource).withResultLimit(this.resultLimit)
.withIsQueryLogEnable(this.isQueryLogEnable).build();
@@ -224,7 +225,8 @@ public class SqlUtils {
}
public SqlUtils build() {
- Database database = Database.builder().name(this.name).type(this.type).url(this.jdbcUrl)
+ Database database = Database.builder().name(this.name)
+ .type(EngineType.fromString(this.type.toUpperCase())).url(this.jdbcUrl)
.username(this.username).password(this.password).build();
SqlUtils sqlUtils = new SqlUtils(database);

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.server.utils;
import com.alibaba.fastjson.JSONObject;
+ import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.request.DatabaseReq;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.core.pojo.ConnectInfo;
@@ -16,6 +17,7 @@ public class DatabaseConverter {
public static Database convert(DatabaseResp databaseResp) {
Database database = new Database();
BeanUtils.copyProperties(databaseResp, database);
+ database.setType(EngineType.fromString(databaseResp.getType().toUpperCase()));
return database;
}
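With this converter change, parsing the raw type string happens exactly once at the conversion boundary, and everything downstream (the renderers and parsers above) reads the enum. A hedged usage sketch, assuming a DatabaseResp already obtained from the existing service layer:

// the string-to-enum parse now lives only in the converter
Database database = DatabaseConverter.convert(databaseResp);
EngineType engineType = database.getType(); // already an EngineType; no fromString at call sites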

View File

@@ -29,9 +29,9 @@ class HeadlessParserServiceTest {
SqlBuilder aggBuilder = new SqlBuilder(semanticSchema);
QueryStatement queryStatement = new QueryStatement();
queryStatement.setOntologyQueryParam(ontologyQueryParam);
- aggBuilder.buildOntologySql(queryStatement);
- EngineType engineType =
- EngineType.fromString(semanticSchema.getOntology().getDatabase().getType());
+ String sql = aggBuilder.buildOntologySql(queryStatement);
+ queryStatement.setSql(sql);
+ EngineType engineType = semanticSchema.getOntology().getDatabase().getType();
sqlParser.setSql(aggBuilder.getSql(engineType));
} catch (Exception e) {
sqlParser.setErrMsg(e.getMessage());