(improvement)(headless) Update server and core, server calls core one-way (#592)

* (improvement)(headless) Update server and core so that the server calls core one-way
* (improvement)(Auth) When obtaining user information, determine whether the user is a system admin.
---------

Co-authored-by: jolunoluo

Author:    LXW
Date:      2024-01-04 12:08:12 +08:00
Committer: GitHub
Parent:    7acb48da0e
Commit:    0858c13365

100 changed files with 811 additions and 1516 deletions
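The headless bullet describes a dependency-direction cleanup: the deleted tests below mix server-layer types (headless.server.pojo.yaml.*) with core-layer parser classes (headless.core.parser.*), whereas after this change the server module is meant to call into core in one direction only. A minimal sketch of that direction, using hypothetical names (CoreQueryExecutor and HeadlessQueryServiceImpl are illustrative, not the project's actual classes):

// Hypothetical sketch of the "server calls core one-way" direction; the class
// and method names below are assumptions for illustration, not SuperSonic's API.
interface CoreQueryExecutor {
    // Core-side entry point: accepts an already-translated statement, returns SQL.
    String execute(String statement) throws Exception;
}

class HeadlessQueryServiceImpl {
    private final CoreQueryExecutor coreExecutor;

    HeadlessQueryServiceImpl(CoreQueryExecutor coreExecutor) {
        this.coreExecutor = coreExecutor;
    }

    // The server layer translates its own request objects and calls downward
    // into core; nothing in core imports or calls back into server classes.
    String query(String statement) throws Exception {
        return coreExecutor.execute(statement);
    }
}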
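The Auth bullet is equally terse; as a hedged illustration only (UserInfo, superAdmin, and the admin-name lookup are assumptions, not necessarily the project's actual fields or API), determining system-admin status at the point where user information is obtained could look like:

import java.util.Set;

// Hypothetical sketch of the Auth change: resolve system-admin status while
// fetching user information, so callers receive it with the user object.
// All names here are assumptions for illustration, not SuperSonic's actual code.
class UserInfo {
    String name;
    boolean superAdmin;
}

class UserInfoService {
    private final Set<String> systemAdmins;  // assumed source of admin user names

    UserInfoService(Set<String> systemAdmins) {
        this.systemAdmins = systemAdmins;
    }

    UserInfo getUserInfo(String userName) {
        UserInfo info = new UserInfo();
        info.name = userName;
        // Determine admin status here so it does not need to be re-checked downstream.
        info.superAdmin = systemAdmins.contains(userName);
        return info;
    }
}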


@@ -1,255 +0,0 @@
package com.tencent.supersonic.headless.query.domain.calcite;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.api.response.SqlParserResp;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.IdentifyYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MeasureYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricTypeParamsYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricYamlTpl;
import com.tencent.supersonic.headless.api.enums.AggOption;
import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.core.parser.calcite.HeadlessSchemaManager;
import com.tencent.supersonic.headless.core.parser.calcite.planner.AggPlanner;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
@Slf4j
class HeadlessParserServiceTest {

    private static Map<String, HeadlessSchema> headlessSchemaMap = new HashMap<>();

    // Parses a MetricQueryReq into SQL with AggPlanner and returns the result
    // (or the error message) wrapped in a SqlParserResp.
    public static SqlParserResp parser(HeadlessSchema headlessSchema, MetricQueryReq metricCommand, boolean isAgg) {
        SqlParserResp sqlParser = new SqlParserResp();
        if (metricCommand.getRootPath().isEmpty()) {
            sqlParser.setErrMsg("rootPath empty");
            return sqlParser;
        }
        try {
            if (headlessSchema == null) {
                sqlParser.setErrMsg("headlessSchema not found");
                return sqlParser;
            }
            AggPlanner aggBuilder = new AggPlanner(headlessSchema);
            QueryStatement queryStatement = new QueryStatement();
            queryStatement.setMetricReq(metricCommand);
            aggBuilder.explain(queryStatement, AggOption.getAggregation(!isAgg));
            sqlParser.setSql(aggBuilder.getSql());
            sqlParser.setSourceId(aggBuilder.getSourceId());
        } catch (Exception e) {
            sqlParser.setErrMsg(e.getMessage());
            log.error("parser error MetricCommand[{}] error [{}]", metricCommand, e);
        }
        return sqlParser;
    }

    // Builds an in-memory HeadlessSchema for the s2_pv_uv_statis data model,
    // registers its dimensions and metrics, then parses two metric queries.
    public void test() throws Exception {
        DataModelYamlTpl datasource = new DataModelYamlTpl();
        datasource.setName("s2_pv_uv_statis");
        datasource.setSourceId(1L);
        datasource.setSqlQuery("SELECT imp_date, user_name,page,1 as pv, user_name as uv FROM s2_pv_uv_statis");

        MeasureYamlTpl measure = new MeasureYamlTpl();
        measure.setAgg("sum");
        measure.setName("s2_pv_uv_statis_pv");
        measure.setExpr("pv");
        List<MeasureYamlTpl> measures = new ArrayList<>();
        measures.add(measure);
        MeasureYamlTpl measure2 = new MeasureYamlTpl();
        measure2.setAgg("count");
        measure2.setName("s2_pv_uv_statis_internal_cnt");
        measure2.setExpr("1");
        measure2.setCreateMetric("true");
        measures.add(measure2);
        MeasureYamlTpl measure3 = new MeasureYamlTpl();
        measure3.setAgg("count");
        measure3.setName("s2_pv_uv_statis_uv");
        measure3.setExpr("uv");
        measure3.setCreateMetric("true");
        measures.add(measure3);
        datasource.setMeasures(measures);

        DimensionYamlTpl dimension = new DimensionYamlTpl();
        dimension.setName("imp_date");
        dimension.setExpr("imp_date");
        dimension.setType("time");
        DimensionTimeTypeParamsTpl dimensionTimeTypeParams = new DimensionTimeTypeParamsTpl();
        dimensionTimeTypeParams.setIsPrimary("true");
        dimensionTimeTypeParams.setTimeGranularity("day");
        dimension.setTypeParams(dimensionTimeTypeParams);
        List<DimensionYamlTpl> dimensions = new ArrayList<>();
        dimensions.add(dimension);
        DimensionYamlTpl dimension2 = new DimensionYamlTpl();
        dimension2.setName("sys_imp_date");
        dimension2.setExpr("imp_date");
        dimension2.setType("time");
        DimensionTimeTypeParamsTpl dimensionTimeTypeParams2 = new DimensionTimeTypeParamsTpl();
        dimensionTimeTypeParams2.setIsPrimary("true");
        dimensionTimeTypeParams2.setTimeGranularity("day");
        dimension2.setTypeParams(dimensionTimeTypeParams2);
        dimensions.add(dimension2);
        DimensionYamlTpl dimension3 = new DimensionYamlTpl();
        dimension3.setName("sys_imp_week");
        dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))");
        dimension3.setType("time");
        DimensionTimeTypeParamsTpl dimensionTimeTypeParams3 = new DimensionTimeTypeParamsTpl();
        dimensionTimeTypeParams3.setIsPrimary("true");
        dimensionTimeTypeParams3.setTimeGranularity("day");
        dimension3.setTypeParams(dimensionTimeTypeParams3);
        dimensions.add(dimension3);
        datasource.setDimensions(dimensions);

        List<IdentifyYamlTpl> identifies = new ArrayList<>();
        IdentifyYamlTpl identify = new IdentifyYamlTpl();
        identify.setName("user_name");
        identify.setType("primary");
        identifies.add(identify);
        datasource.setIdentifiers(identifies);

        HeadlessSchema headlessSchema = HeadlessSchema.newBuilder("s2").build();
        HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getDatasource(datasource));

        DimensionYamlTpl dimension1 = new DimensionYamlTpl();
        dimension1.setExpr("page");
        dimension1.setName("page");
        dimension1.setType("categorical");
        List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
        dimensionYamlTpls.add(dimension1);
        HeadlessSchemaManager.update(headlessSchema, "s2_pv_uv_statis",
                HeadlessSchemaManager.getDimensions(dimensionYamlTpls));

        MetricYamlTpl metric1 = new MetricYamlTpl();
        metric1.setName("pv");
        metric1.setType("expr");
        MetricTypeParamsYamlTpl metricTypeParams = new MetricTypeParamsYamlTpl();
        List<MeasureYamlTpl> measures1 = new ArrayList<>();
        MeasureYamlTpl measure1 = new MeasureYamlTpl();
        measure1.setName("s2_pv_uv_statis_pv");
        measures1.add(measure1);
        metricTypeParams.setMeasures(measures1);
        metricTypeParams.setExpr("s2_pv_uv_statis_pv");
        metric1.setTypeParams(metricTypeParams);
        List<MetricYamlTpl> metric = new ArrayList<>();
        metric.add(metric1);
        MetricYamlTpl metric2 = new MetricYamlTpl();
        metric2.setName("uv");
        metric2.setType("expr");
        MetricTypeParamsYamlTpl metricTypeParams1 = new MetricTypeParamsYamlTpl();
        List<MeasureYamlTpl> measures2 = new ArrayList<>();
        MeasureYamlTpl measure4 = new MeasureYamlTpl();
        measure4.setName("s2_pv_uv_statis_uv");
        measures2.add(measure4);
        metricTypeParams1.setMeasures(measures2);
        metricTypeParams1.setExpr("s2_pv_uv_statis_uv");
        metric2.setTypeParams(metricTypeParams1);
        metric.add(metric2);
        HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getMetrics(metric));

        // First query: pv grouped by sys_imp_date on the single data model.
        MetricQueryReq metricCommand = new MetricQueryReq();
        metricCommand.setRootPath("s2");
        metricCommand.setDimensions(new ArrayList<>(Arrays.asList("sys_imp_date")));
        metricCommand.setMetrics(new ArrayList<>(Arrays.asList("pv")));
        metricCommand.setWhere("user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') ");
        metricCommand.setLimit(1000L);
        List<ColumnOrder> orders = new ArrayList<>();
        orders.add(ColumnOrder.buildDesc("sys_imp_date"));
        metricCommand.setOrder(orders);
        System.out.println(parser(headlessSchema, metricCommand, true));

        // Second query: after registering the user_department model, request
        // dimensions from the related model as well (e.g. user_name__department).
        addDepartment(headlessSchema);
        MetricQueryReq metricCommand2 = new MetricQueryReq();
        metricCommand2.setRootPath("s2");
        metricCommand2.setDimensions(new ArrayList<>(
                Arrays.asList("sys_imp_date", "user_name__department", "user_name", "user_name__page")));
        metricCommand2.setMetrics(new ArrayList<>(Arrays.asList("pv")));
        metricCommand2.setWhere(
                "user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') ");
        metricCommand2.setLimit(1000L);
        List<ColumnOrder> orders2 = new ArrayList<>();
        orders2.add(ColumnOrder.buildDesc("sys_imp_date"));
        metricCommand2.setOrder(orders2);
        System.out.println(parser(headlessSchema, metricCommand2, true));
    }

    // Registers a second data model (user_department) and its department
    // dimension, sharing the user_name identifier with s2_pv_uv_statis.
    private static void addDepartment(HeadlessSchema headlessSchema) {
        DataModelYamlTpl datasource = new DataModelYamlTpl();
        datasource.setName("user_department");
        datasource.setSourceId(1L);
        datasource.setSqlQuery("SELECT imp_date,user_name,department FROM s2_user_department");
        MeasureYamlTpl measure = new MeasureYamlTpl();
        measure.setAgg("count");
        measure.setName("user_department_internal_cnt");
        measure.setCreateMetric("true");
        measure.setExpr("1");
        List<MeasureYamlTpl> measures = new ArrayList<>();
        measures.add(measure);
        datasource.setMeasures(measures);
        DimensionYamlTpl dimension = new DimensionYamlTpl();
        dimension.setName("sys_imp_date");
        dimension.setExpr("imp_date");
        dimension.setType("time");
        DimensionTimeTypeParamsTpl dimensionTimeTypeParams = new DimensionTimeTypeParamsTpl();
        dimensionTimeTypeParams.setIsPrimary("true");
        dimensionTimeTypeParams.setTimeGranularity("day");
        dimension.setTypeParams(dimensionTimeTypeParams);
        List<DimensionYamlTpl> dimensions = new ArrayList<>();
        dimensions.add(dimension);
        DimensionYamlTpl dimension3 = new DimensionYamlTpl();
        dimension3.setName("sys_imp_week");
        dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))");
        dimension3.setType("time");
        DimensionTimeTypeParamsTpl dimensionTimeTypeParams3 = new DimensionTimeTypeParamsTpl();
        dimensionTimeTypeParams3.setIsPrimary("true");
        dimensionTimeTypeParams3.setTimeGranularity("week");
        dimension3.setTypeParams(dimensionTimeTypeParams3);
        dimensions.add(dimension3);
        datasource.setDimensions(dimensions);
        List<IdentifyYamlTpl> identifies = new ArrayList<>();
        IdentifyYamlTpl identify = new IdentifyYamlTpl();
        identify.setName("user_name");
        identify.setType("primary");
        identifies.add(identify);
        datasource.setIdentifiers(identifies);
        headlessSchema.getDatasource().put("user_department", HeadlessSchemaManager.getDatasource(datasource));
        DimensionYamlTpl dimension1 = new DimensionYamlTpl();
        dimension1.setExpr("department");
        dimension1.setName("department");
        dimension1.setType("categorical");
        List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
        dimensionYamlTpls.add(dimension1);
        headlessSchema.getDimension()
                .put("user_department", HeadlessSchemaManager.getDimensions(dimensionYamlTpls));
    }
}


@@ -1,61 +0,0 @@
package com.tencent.supersonic.headless.query.domain.calcite;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.DateConf.DateMode;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.headless.api.request.QueryStructReq;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.optimizer.QueryOptimizer;
import com.tencent.supersonic.headless.core.parser.QueryParser;
import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.core.utils.QueryUtils;
import lombok.extern.slf4j.Slf4j;
import java.util.Arrays;
@Slf4j
public class MaterializationQueryTest {

    private final QueryParser queryParser;
    private final QueryUtils queryUtils;

    public MaterializationQueryTest(QueryParser queryParser,
            QueryUtils queryUtils) {
        this.queryParser = queryParser;
        this.queryUtils = queryUtils;
    }

    public void test() {
        QueryStructReq queryStructReq = new QueryStructReq();
        queryStructReq.setModelId(1L);
        Aggregator aggregator = new Aggregator();
        aggregator.setFunc(AggOperatorEnum.UNKNOWN);
        aggregator.setColumn("pv");
        queryStructReq.setAggregators(Arrays.asList(aggregator));
        queryStructReq.setGroups(Arrays.asList("department"));
        DateConf dateConf = new DateConf();
        dateConf.setDateMode(DateMode.LIST);
        dateConf.setDateList(Arrays.asList("2023-08-01"));
        queryStructReq.setDateInfo(dateConf);
        try {
            QueryStatement queryStatement = new QueryStatement();
            queryStatement.setQueryStructReq(queryStructReq);
            queryStatement.setIsS2SQL(false);
            queryStatement = queryParser.logicSql(queryStatement);
            queryUtils.checkSqlParse(queryStatement);
            queryStatement.setModelIds(queryStructReq.getModelIds());
            log.info("queryStatement:{}", queryStatement);
            for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) {
                queryOptimizer.rewrite(queryStructReq, queryStatement);
            }
            //queryParser.test(queryStructReq,metricReq);
            log.info("queryStatement:{}", queryStatement);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}


@@ -1,124 +0,0 @@
package com.tencent.supersonic.headless.query.service;
import com.alibaba.excel.util.FileUtils;
import com.google.common.collect.Lists;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.util.DateUtils;
import com.tencent.supersonic.headless.api.request.BatchDownloadReq;
import com.tencent.supersonic.headless.api.pojo.DrillDownDimension;
import com.tencent.supersonic.headless.api.pojo.RelateDimension;
import com.tencent.supersonic.headless.api.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.response.ModelSchemaResp;
import com.tencent.supersonic.headless.api.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.core.service.QueryService;
import com.tencent.supersonic.headless.core.service.impl.DownloadServiceImpl;
import com.tencent.supersonic.headless.server.service.ModelService;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import java.io.File;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
class DownloadServiceImplTest {

    @Test
    void testBatchDownload() throws Exception {
        ModelService modelService = Mockito.mock(ModelService.class);
        QueryService queryService = Mockito.mock(QueryService.class);
        when(modelService.fetchModelSchema(any())).thenReturn(Lists.newArrayList(mockModelSchemaResp()));
        when(queryService.queryByStruct(any(), any())).thenReturn(mockQueryResult());
        DownloadServiceImpl downloadService = new DownloadServiceImpl(modelService, queryService);
        String fileName = String.format("%s_%s.xlsx", "supersonic", DateUtils.format(new Date(), DateUtils.FORMAT));
        File file = FileUtils.createTmpFile(fileName);
        downloadService.batchDownload(buildBatchDownloadReq(), User.getFakeUser(), file);
    }

    private ModelSchemaResp mockModelSchemaResp() {
        ModelSchemaResp modelSchemaResp = new ModelSchemaResp();
        modelSchemaResp.setId(1L);
        List<MetricSchemaResp> metricResps = Lists.newArrayList();
        metricResps.add(mockMetricPv());
        metricResps.add(mockMetricUv());
        modelSchemaResp.setMetrics(metricResps);
        List<DimSchemaResp> dimSchemaResps = Lists.newArrayList();
        dimSchemaResps.add(mockDimension(1L, "user_name", "用户名"));
        dimSchemaResps.add(mockDimension(2L, "department", "部门"));
        dimSchemaResps.add(mockDimension(3L, "page", "页面"));
        modelSchemaResp.setDimensions(dimSchemaResps);
        return modelSchemaResp;
    }

    private MetricSchemaResp mockMetric(Long id, String bizName, String name, List<Long> drillDownloadDimensions) {
        MetricSchemaResp metricResp = new MetricSchemaResp();
        metricResp.setId(id);
        metricResp.setBizName(bizName);
        metricResp.setName(name);
        RelateDimension relateDimension = new RelateDimension();
        relateDimension.setDrillDownDimensions(drillDownloadDimensions.stream()
                .map(DrillDownDimension::new).collect(Collectors.toList()));
        metricResp.setRelateDimension(relateDimension);
        return metricResp;
    }

    private DimSchemaResp mockDimension(Long id, String bizName, String name) {
        DimSchemaResp dimSchemaResp = new DimSchemaResp();
        dimSchemaResp.setId(id);
        dimSchemaResp.setBizName(bizName);
        dimSchemaResp.setName(name);
        return dimSchemaResp;
    }

    private MetricSchemaResp mockMetricPv() {
        return mockMetric(1L, "pv", "访问次数", Lists.newArrayList(1L, 2L));
    }

    private MetricSchemaResp mockMetricUv() {
        return mockMetric(2L, "uv", "访问用户数", Lists.newArrayList(2L));
    }

    private BatchDownloadReq buildBatchDownloadReq() {
        BatchDownloadReq batchDownloadReq = new BatchDownloadReq();
        batchDownloadReq.setMetricIds(Lists.newArrayList(1L));
        batchDownloadReq.setDateInfo(mockDataConf());
        return batchDownloadReq;
    }

    private DateConf mockDataConf() {
        DateConf dateConf = new DateConf();
        dateConf.setStartDate("2023-10-11");
        dateConf.setEndDate("2023-10-15");
        dateConf.setDateMode(DateConf.DateMode.BETWEEN);
        return dateConf;
    }

    private QueryResultWithSchemaResp mockQueryResult() {
        QueryResultWithSchemaResp queryResultWithSchemaResp = new QueryResultWithSchemaResp();
        List<Map<String, Object>> resultList = Lists.newArrayList();
        resultList.add(createMap("2023-10-11", "tom", "hr", "1"));
        resultList.add(createMap("2023-10-12", "alice", "sales", "2"));
        resultList.add(createMap("2023-10-13", "jack", "sales", "3"));
        resultList.add(createMap("2023-10-14", "luck", "market", "4"));
        resultList.add(createMap("2023-10-15", "tom", "hr", "5"));
        queryResultWithSchemaResp.setResultList(resultList);
        return queryResultWithSchemaResp;
    }

    private static Map<String, Object> createMap(String sysImpDate, String d1, String d2, String m1) {
        Map<String, Object> map = new HashMap<>();
        map.put("sys_imp_date", sysImpDate);
        map.put("user_name", d1);
        map.put("department", d2);
        map.put("pv", m1);
        return map;
    }
}


@@ -1,39 +0,0 @@
package com.tencent.supersonic.headless.query.utils;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.headless.core.utils.DataTransformUtils;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
class DataTransformUtilsTest {

    @Test
    public void testTransform() {
        List<Map<String, Object>> inputData = new ArrayList<>();
        inputData.add(createMap("2023/10/11", "a", "b", "1"));
        inputData.add(createMap("2023/10/12", "a", "c", "2"));
        inputData.add(createMap("2023/10/13", "a", "b", "3"));
        inputData.add(createMap("2023/10/14", "a", "c", "4"));
        inputData.add(createMap("2023/10/15", "b", "b", "5"));
        List<String> groups = Lists.newArrayList("d1", "d2");
        String metric = "m1";
        List<Map<String, Object>> resultData = DataTransformUtils.transform(inputData,
                metric, groups, new DateConf());
        Assertions.assertEquals(3, resultData.size());
    }

    private static Map<String, Object> createMap(String sysImpDate, String d1, String d2, String m1) {
        Map<String, Object> map = new HashMap<>();
        map.put("sys_imp_date", sysImpDate);
        map.put("d1", d1);
        map.put("d2", d2);
        map.put("m1", m1);
        return map;
    }
}