Mirror of https://github.com/tencentmusic/supersonic.git (synced 2025-12-15 22:46:49 +00:00)

first commit
launchers/semantic/pom.xml (new file, 109 lines)
@@ -0,0 +1,109 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>launchers</artifactId>
        <groupId>com.tencent.supersonic</groupId>
        <version>1.0.0-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>launchers-semantic</artifactId>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <start-class>com.tencent.supersonic.SemanticLauncher</start-class>
    </properties>

    <dependencies>

        <dependency>
            <groupId>javax.servlet.jsp</groupId>
            <artifactId>jsp-api</artifactId>
            <version>2.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.tencent.supersonic</groupId>
            <artifactId>semantic-query</artifactId>
            <version>${project.version}</version>
        </dependency>

        <dependency>
            <groupId>com.tencent.supersonic</groupId>
            <artifactId>semantic-core</artifactId>
            <version>${project.version}</version>
        </dependency>

        <dependency>
            <groupId>com.tencent.supersonic</groupId>
            <artifactId>launchers-common</artifactId>
            <version>${project.version}</version>
        </dependency>

        <dependency>
            <groupId>com.tencent.supersonic</groupId>
            <artifactId>auth-authentication</artifactId>
            <version>${project.version}</version>
        </dependency>
        <dependency>
            <groupId>com.tencent.supersonic</groupId>
            <artifactId>auth-authorization</artifactId>
            <version>${project.version}</version>
        </dependency>

        <dependency>
            <groupId>com.tencent.supersonic</groupId>
            <artifactId>auth-api</artifactId>
            <version>${project.version}</version>
        </dependency>
        <dependency>
            <groupId>com.h2database</groupId>
            <artifactId>h2</artifactId>
            <version>${h2.version}</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <version>2.4</version>
                <configuration>
                    <excludes>
                        <exclude>*.*</exclude>
                    </excludes>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-assembly-plugin</artifactId>
                <version>2.4</version>
                <configuration>
                    <skipAssembly>false</skipAssembly>
                    <archive>
                        <manifest>
                            <mainClass>${start-class}</mainClass>
                        </manifest>
                    </archive>
                    <descriptors>
                        <descriptor>../../assembly/build/build.xml</descriptor>
                    </descriptors>
                </configuration>
                <executions>
                    <execution>
                        <id>make-assembly</id>
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>

</project>
launchers/semantic/src/main/bin/env.sh (new file, 2 lines)
@@ -0,0 +1,2 @@
export APP_NAME=semantic-service
export MAIN_CLASS=com.tencent.supersonic.SemanticLauncher
launchers/semantic/src/main/bin/run.sh (new executable file, 38 lines)
@@ -0,0 +1,38 @@
#!/usr/bin/env bash

binDir=$(cd "$(dirname "$0")"; pwd)
baseDir=$(readlink -f $binDir/../)
libDir=$baseDir/lib
confDir=$baseDir/conf
webDir=$baseDir/webapp

source ${baseDir}/bin/env.sh

CLASSPATH=""
CLASSPATH=$CLASSPATH:$confDir

for jarPath in $libDir/*.jar; do
    CLASSPATH=$CLASSPATH:$jarPath
done

export CLASSPATH
export LANG="zh_CN.UTF-8"

cd $baseDir

# fall back to the first JDK found under /usr/jdk64 when JAVA_HOME is not set
if [[ "$JAVA_HOME" == "" ]]; then
    JAVA_HOME=$(ls /usr/jdk64/jdk* -d 2>/dev/null | xargs | awk '{print $1}')
fi
export PATH=$JAVA_HOME/bin:$PATH

command="-Dfile.encoding=UTF-8 -Duser.language=zh -Duser.region=CN -Duser.timezone=GMT+08 -Xms1024m -Xmx2048m $MAIN_CLASS"

mkdir -p $baseDir/logs
if [[ "$is_test" == "true" ]]; then
    java -Dspring.profiles.active="dev" $command >/dev/null 2>$baseDir/logs/error.log &
else
    java $command $baseDir >/dev/null 2>$baseDir/logs/error.log &
fi
launchers/semantic/src/main/bin/service.sh (new executable file, 55 lines)
@@ -0,0 +1,55 @@
#!/usr/bin/env bash

binDir=$(cd "$(dirname "$0")"; pwd)
baseDir=$(readlink -f $binDir/../)
confDir=$baseDir/conf
source ${baseDir}/bin/env.sh

command=$1

function start()
{
    # the second column of ps output is the PID of the running launcher
    pid=$(ps aux | grep $MAIN_CLASS | grep -v grep | grep $baseDir | awk '{print $2}')
    if [[ "$pid" == "" ]]; then
        logs=$baseDir/logs/service.sh.log
        env DEPLOY=true $baseDir/bin/run.sh $MAIN_CLASS && echo "Process started, check logs/error.log for details"
        return 0
    else
        echo "Process (PID = $pid) is running."
        return 1
    fi
}

function stop()
{
    pid=$(ps aux | grep $MAIN_CLASS | grep -v grep | grep $baseDir | awk '{print $2}')
    if [[ "$pid" == "" ]]; then
        echo "Process is not running!"
        return 1
    else
        kill -9 $pid
        echo "Process (PID = $pid) is killed!"
        return 0
    fi
}

case "$command" in
    start)
        echo -e "Starting $APP_NAME"
        start
        ;;
    stop)
        echo -e "Stopping $APP_NAME"
        stop
        ;;
    restart)
        echo -e "Restarting $APP_NAME"
        stop
        start
        ;;
    *)
        echo "Usage: $0 {start|stop|restart}"
        exit 1
esac

exit 0
launchers/semantic/src/main/java/com/tencent/supersonic/SemanticLauncher.java (new file, 20 lines)
@@ -0,0 +1,20 @@
package com.tencent.supersonic;

import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;

/**
 * Semantic Launcher
 **/
@Slf4j
@SpringBootApplication(scanBasePackages = {"com.tencent.supersonic"},
        exclude = {MongoAutoConfiguration.class, MongoDataAutoConfiguration.class})
public class SemanticLauncher {

    public static void main(String[] args) {
        SpringApplication.run(SemanticLauncher.class, args);
    }
}
launchers/semantic/src/main/java/com/tencent/supersonic/db/MybatisConfig.java (new file, 33 lines)
@@ -0,0 +1,33 @@
package com.tencent.supersonic.db;

import com.github.pagehelper.PageInterceptor;
import javax.sql.DataSource;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.plugin.Interceptor;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;


@Configuration
@MapperScan(value = "com.tencent.supersonic", annotationClass = Mapper.class)
public class MybatisConfig {

    private static final String MAPPER_LOCATION = "classpath*:mapper/**/*.xml";

    @Bean
    public SqlSessionFactory sqlSessionFactory(DataSource dataSource, PageInterceptor pageInterceptor)
            throws Exception {
        SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
        org.apache.ibatis.session.Configuration configuration = new org.apache.ibatis.session.Configuration();
        configuration.setMapUnderscoreToCamelCase(true);
        bean.setConfiguration(configuration);
        bean.setDataSource(dataSource);
        bean.setPlugins(new Interceptor[]{pageInterceptor});
        bean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources(MAPPER_LOCATION));
        return bean.getObject();
    }
}
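For illustration only, a minimal sketch of the kind of mapper this configuration registers. The interface below is hypothetical and not part of this commit; it just shows that any @Mapper-annotated interface under com.tencent.supersonic is picked up by the @MapperScan above and executed through the SqlSessionFactory it builds.

package com.tencent.supersonic.db;

import java.util.List;
import java.util.Map;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Select;

// Hypothetical example mapper: the @Mapper annotation and the package prefix are what
// @MapperScan(value = "com.tencent.supersonic", annotationClass = Mapper.class) matches on.
// Inline SQL is used for brevity; XML mappers would instead be resolved from
// classpath*:mapper/**/*.xml by the SqlSessionFactory defined above.
@Mapper
public interface DomainDemoMapper {

    // setMapUnderscoreToCamelCase(true) would map columns like biz_name to bizName on a
    // result object; a Map result type is used here to keep the sketch self-contained.
    @Select("SELECT id, name, biz_name FROM s2_domain WHERE status = #{status}")
    List<Map<String, Object>> selectByStatus(int status);
}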
launchers/semantic/src/main/java/com/tencent/supersonic/db/PageHelperConfig.java (new file, 20 lines)
@@ -0,0 +1,20 @@
package com.tencent.supersonic.db;

import com.github.pagehelper.PageInterceptor;
import java.util.Properties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;


@Configuration
public class PageHelperConfig {

    @Bean
    public PageInterceptor pageInterceptor() {
        PageInterceptor pageInterceptor = new PageInterceptor();
        Properties properties = new Properties();
        properties.setProperty("helperDialect", "h2");
        pageInterceptor.setProperties(properties);
        return pageInterceptor;
    }
}
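A minimal sketch of how callers typically use this interceptor, assuming the hypothetical DomainDemoMapper sketched earlier: PageHelper.startPage marks the next MyBatis query on the current thread for pagination, and the PageInterceptor bean registered here rewrites that query using the configured h2 dialect.

package com.tencent.supersonic.db;

import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import java.util.List;
import java.util.Map;

// Hypothetical caller illustrating the PageHelper + PageInterceptor flow.
public class PageQueryDemo {

    public PageInfo<Map<String, Object>> listActiveDomains(DomainDemoMapper mapper,
                                                           int pageNum, int pageSize) {
        // Applies to the next MyBatis query executed on this thread.
        PageHelper.startPage(pageNum, pageSize);
        List<Map<String, Object>> rows = mapper.selectByStatus(0);
        // PageInfo carries the page contents plus the total count computed by the interceptor.
        return new PageInfo<>(rows);
    }
}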
@@ -0,0 +1,2 @@
com.tencent.supersonic.auth.authentication.domain.interceptor.AuthenticationInterceptor=\
com.tencent.supersonic.auth.authentication.domain.interceptor.DefaultAuthenticationInterceptor
launchers/semantic/src/main/resources/application-local.yaml (new file, 23 lines)
@@ -0,0 +1,23 @@
server:
  servlet:
    context-path:
  port: 9081

spring:
  h2:
    console:
      path: /h2-console/semantic
      # enable the web console
      enabled: true
  datasource:
    driver-class-name: org.h2.Driver
    url: jdbc:h2:mem:semantic;DATABASE_TO_UPPER=false
    username: root
    password: semantic
    schema: classpath:db/semantic-schema-h2.sql
    data: classpath:db/semantic-data-h2.sql

authentication:
  enable: true
  exclude:
    path: /api/auth/user/register,/api/auth/user/login
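To make the datasource settings concrete, here is a self-contained sketch (not part of this commit) that opens its own in-memory H2 database with the same URL options and credentials, creates a trimmed-down version of one table from db/semantic-schema-h2.sql, and queries it. Inside the running service, Spring Boot applies the schema and data scripts to jdbc:h2:mem:semantic automatically.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Illustration of the H2 settings above; it uses a separate in-memory database
// (named "demo" here) so it does not depend on the running semantic service.
public class H2DatasourceDemo {

    public static void main(String[] args) throws Exception {
        String url = "jdbc:h2:mem:demo;DATABASE_TO_UPPER=false";
        try (Connection conn = DriverManager.getConnection(url, "root", "semantic");
             Statement stmt = conn.createStatement()) {
            // One table from semantic-schema-h2.sql, reduced to a few columns.
            stmt.execute("CREATE TABLE s2_user (id INT AUTO_INCREMENT PRIMARY KEY, "
                    + "name VARCHAR(100) NOT NULL, display_name VARCHAR(100))");
            stmt.execute("INSERT INTO s2_user (name, display_name) VALUES ('admin', 'Admin')");
            try (ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM s2_user")) {
                while (rs.next()) {
                    System.out.println("users: " + rs.getLong(1));
                }
            }
        }
    }
}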
launchers/semantic/src/main/resources/application.yaml (new file, 5 lines)
@@ -0,0 +1,5 @@
spring:
  profiles:
    active: local
  application:
    name: semantic
launchers/semantic/src/main/resources/db/semantic-data-h2.sql (new file, 1047 lines)
(file diff suppressed because it is too large)
launchers/semantic/src/main/resources/db/semantic-schema-h2.sql (new file, 239 lines)
@@ -0,0 +1,239 @@
CREATE TABLE IF NOT EXISTS `s2_domain` (
    `id` INT NOT NULL AUTO_INCREMENT,
    `name` varchar(255) DEFAULT NULL, -- domain name
    `biz_name` varchar(255) DEFAULT NULL, -- internal name
    `parent_id` INT DEFAULT '0', -- parent domain ID
    `status` INT NOT NULL,
    `created_at` TIMESTAMP DEFAULT NULL,
    `created_by` varchar(100) DEFAULT NULL,
    `updated_at` TIMESTAMP DEFAULT NULL,
    `updated_by` varchar(100) DEFAULT NULL,
    `is_unique` INT DEFAULT NULL, -- 0 is non-unique, 1 is unique
    `admin` varchar(3000) DEFAULT NULL, -- domain administrators
    `admin_org` varchar(3000) DEFAULT NULL, -- domain administrators' organization
    `is_open` TINYINT DEFAULT NULL, -- whether the domain is public
    `viewer` varchar(3000) DEFAULT NULL, -- users allowed to view the domain
    `view_org` varchar(3000) DEFAULT NULL, -- organizations allowed to view the domain
    PRIMARY KEY (`id`)
);
COMMENT ON TABLE s2_domain IS 'domain basic information';


CREATE TABLE `s2_database` (
    `id` INT NOT NULL AUTO_INCREMENT,
    `domain_id` INT NOT NULL,
    `name` varchar(255) NOT NULL,
    `description` varchar(500) DEFAULT NULL,
    `type` varchar(20) NOT NULL, -- type: mysql, clickhouse, tdw
    `config` varchar(655) NOT NULL,
    `created_at` TIMESTAMP NOT NULL,
    `created_by` varchar(100) NOT NULL,
    `updated_at` TIMESTAMP NOT NULL,
    `updated_by` varchar(100) NOT NULL,
    PRIMARY KEY (`id`)
);
COMMENT ON TABLE s2_database IS 'database instance table';

CREATE TABLE IF NOT EXISTS `s2_datasource` (
    `id` INT NOT NULL AUTO_INCREMENT,
    `domain_id` INT NOT NULL,
    `name` varchar(255) NOT NULL,
    `biz_name` varchar(255) NOT NULL,
    `description` varchar(500) DEFAULT NULL,
    `database_id` INT NOT NULL,
    `datasource_detail` LONGVARCHAR NOT NULL,
    `created_at` TIMESTAMP NOT NULL,
    `created_by` varchar(100) NOT NULL,
    `updated_at` TIMESTAMP NOT NULL,
    `updated_by` varchar(100) NOT NULL,
    PRIMARY KEY (`id`)
);
COMMENT ON TABLE s2_datasource IS 'datasource table';

create table s2_user
(
    id INT AUTO_INCREMENT,
    name varchar(100) not null,
    display_name varchar(100) null,
    password varchar(100) null,
    email varchar(100) null,
    PRIMARY KEY (`id`)
);
COMMENT ON TABLE s2_user IS 'user information table';

create table s2_auth_groups
(
    group_id INT,
    config varchar(2048),
    PRIMARY KEY (`group_id`)
);

CREATE TABLE IF NOT EXISTS `s2_metric` (
    `id` INT NOT NULL AUTO_INCREMENT,
    `domain_id` INT NOT NULL,
    `name` varchar(255) NOT NULL,
    `biz_name` varchar(255) NOT NULL,
    `description` varchar(500) DEFAULT NULL,
    `status` INT NOT NULL, -- status: 0 is normal, 1 is off the shelf, 2 is deleted
    `sensitive_level` INT NOT NULL,
    `type` varchar(50) NOT NULL, -- type: proxy, expr
    `type_params` LONGVARCHAR DEFAULT NULL,
    `created_at` TIMESTAMP NOT NULL,
    `created_by` varchar(100) NOT NULL,
    `updated_at` TIMESTAMP NOT NULL,
    `updated_by` varchar(100) NOT NULL,
    `data_format_type` varchar(50) DEFAULT NULL,
    `data_format` varchar(500) DEFAULT NULL,
    PRIMARY KEY (`id`)
);
COMMENT ON TABLE s2_metric IS 'metric information table';


CREATE TABLE IF NOT EXISTS `s2_dimension` (
    `id` INT NOT NULL AUTO_INCREMENT,
    `domain_id` INT NOT NULL,
    `datasource_id` INT NOT NULL,
    `name` varchar(255) NOT NULL,
    `biz_name` varchar(255) NOT NULL,
    `description` varchar(500) NOT NULL,
    `status` INT NOT NULL, -- status: 0 is normal, 1 is off the shelf, 2 is deleted
    `sensitive_level` INT DEFAULT NULL,
    `type` varchar(50) NOT NULL, -- type: categorical, time
    `type_params` LONGVARCHAR DEFAULT NULL,
    `expr` LONGVARCHAR NOT NULL, -- expression
    `created_at` TIMESTAMP NOT NULL,
    `created_by` varchar(100) NOT NULL,
    `updated_at` TIMESTAMP NOT NULL,
    `updated_by` varchar(100) NOT NULL,
    `semantic_type` varchar(20) NOT NULL, -- semantic type: DATE, ID, CATEGORY
    PRIMARY KEY (`id`)
);
COMMENT ON TABLE s2_dimension IS 'dimension information table';

create table s2_datasource_rela
(
    id INT AUTO_INCREMENT,
    domain_id INT null,
    datasource_from INT null,
    datasource_to INT null,
    join_key varchar(100) null,
    created_at TIMESTAMP null,
    created_by varchar(100) null,
    updated_at TIMESTAMP null,
    updated_by varchar(100) null,
    PRIMARY KEY (`id`)
);
COMMENT ON TABLE s2_datasource_rela IS 'datasource association table';

create table s2_view_info
(
    id INT auto_increment,
    domain_id INT null,
    type varchar(20) null comment 'datasource, dimension, metric',
    config LONGVARCHAR null comment 'config detail',
    created_at TIMESTAMP null,
    created_by varchar(100) null,
    updated_at TIMESTAMP null,
    updated_by varchar(100) not null
);
COMMENT ON TABLE s2_view_info IS 'view information table';


CREATE TABLE `s2_query_stat_info` (
    `id` INT NOT NULL AUTO_INCREMENT,
    `trace_id` varchar(200) DEFAULT NULL, -- query unique identifier
    `domain_id` INT DEFAULT NULL,
    `user` varchar(200) DEFAULT NULL,
    `created_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    `query_type` varchar(200) DEFAULT NULL, -- the corresponding scene
    `query_type_back` INT DEFAULT '0', -- query type: 0-normal query, 1-pre-refresh type
    `query_sql_cmd` LONGVARCHAR, -- sql type request parameter
    `sql_cmd_md5` varchar(200) DEFAULT NULL, -- sql type request parameter md5
    `query_struct_cmd` LONGVARCHAR, -- struct type request parameter
    `struct_cmd_md5` varchar(200) DEFAULT NULL, -- struct type request parameter md5
    `sql` LONGVARCHAR,
    `sql_md5` varchar(200) DEFAULT NULL, -- sql md5
    `query_engine` varchar(20) DEFAULT NULL,
    `elapsed_ms` BIGINT DEFAULT NULL,
    `query_state` varchar(20) DEFAULT NULL,
    `native_query` INT DEFAULT NULL, -- 1-detail query, 0-aggregation query
    `start_date` varchar(50) DEFAULT NULL,
    `end_date` varchar(50) DEFAULT NULL,
    `dimensions` LONGVARCHAR, -- dimensions involved in sql
    `metrics` LONGVARCHAR, -- metrics involved in sql
    `select_cols` LONGVARCHAR,
    `agg_cols` LONGVARCHAR,
    `filter_cols` LONGVARCHAR,
    `group_by_cols` LONGVARCHAR,
    `order_by_cols` LONGVARCHAR,
    `use_result_cache` TINYINT DEFAULT '-1', -- whether the result cache was hit
    `use_sql_cache` TINYINT DEFAULT '-1', -- whether the sql cache was hit
    `sql_cache_key` LONGVARCHAR, -- sql cache key
    `result_cache_key` LONGVARCHAR, -- result cache key
    PRIMARY KEY (`id`)
);
COMMENT ON TABLE s2_query_stat_info IS 'query statistics table';


CREATE TABLE IF NOT EXISTS `s2_semantic_pasre_info` (
    `id` INT NOT NULL AUTO_INCREMENT,
    `trace_id` varchar(200) NOT NULL,
    `domain_id` INT NOT NULL,
    `dimensions` LONGVARCHAR,
    `metrics` LONGVARCHAR,
    `orders` LONGVARCHAR,
    `filters` LONGVARCHAR,
    `date_info` LONGVARCHAR,
    `limit` INT NOT NULL,
    `native_query` TINYINT NOT NULL DEFAULT '0',
    `sql` LONGVARCHAR,
    `created_at` TIMESTAMP NOT NULL,
    `created_by` varchar(100) NOT NULL,
    `status` INT NOT NULL,
    `elapsed_ms` BIGINT DEFAULT NULL,
    PRIMARY KEY (`id`)
);
COMMENT ON TABLE s2_semantic_pasre_info IS 'semantic layer sql parsing information table';


CREATE TABLE IF NOT EXISTS `s2_available_date_info` (
    `id` INT NOT NULL AUTO_INCREMENT,
    `item_id` INT NOT NULL,
    `type` varchar(255) NOT NULL,
    `date_format` varchar(64) NOT NULL,
    `start_date` varchar(64),
    `end_date` varchar(64),
    `unavailable_date` LONGVARCHAR DEFAULT NULL,
    `created_at` TIMESTAMP NOT NULL,
    `created_by` varchar(100) NOT NULL,
    `updated_at` TIMESTAMP NOT NULL,
    `updated_by` varchar(100) NOT NULL,
    `status` INT DEFAULT '0', -- status: 0 is normal, 1 is off the shelf, 2 is deleted
    PRIMARY KEY (`id`)
);
COMMENT ON TABLE s2_available_date_info IS 'available date information table';


-- demo tables for semantic and chat
CREATE TABLE IF NOT EXISTS `s2_user_department` (
    `user_name` varchar(200) NOT NULL,
    `department` varchar(200) NOT NULL -- department of the user
);
COMMENT ON TABLE s2_user_department IS 'user_department_info';

CREATE TABLE IF NOT EXISTS `s2_pv_uv_statis` (
    `imp_date` varchar(200) NOT NULL,
    `user_name` varchar(200) NOT NULL,
    `page` varchar(200) NOT NULL
);
COMMENT ON TABLE s2_pv_uv_statis IS 'user_access_info';

CREATE TABLE IF NOT EXISTS `s2_stay_time_statis` (
    `imp_date` varchar(200) NOT NULL,
    `user_name` varchar(200) NOT NULL,
    `stay_hours` DOUBLE NOT NULL,
    `page` varchar(200) NOT NULL
);
COMMENT ON TABLE s2_stay_time_statis IS 's2_stay_time_statis_info';
launchers/semantic/src/main/resources/logback-spring.xml (new file, 93 lines)
@@ -0,0 +1,93 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true">
    <contextName>logback</contextName>
    <!-- <property name="LOG_PATH" value="${logback.logdir:-logs}"/>-->
    <property name="LOG_PATH" value="${LOG_PATH:-logs}"/>
    <property name="LOG_APPNAME" value="semantic"/>
    <!-- output to the console -->
    <appender name="consoleLog" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{HH:mm:ss} [%thread] %-5level %logger{36} %line - %msg%n</pattern>
        </encoder>
    </appender>

    <appender name="fileInfoLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Log file name. Without a File property, only the FileNamePattern path rule is used.
             With both <File> and <FileNamePattern>, the current day's log is written to <File>
             and renamed to its date the next day, i.e. <File> always holds the current day's log.
        -->
        <File>${LOG_PATH}/info.${LOG_APPNAME}.log</File>
        <!-- rolling policy: roll by time (TimeBasedRollingPolicy) -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- file name pattern: archives each day's log into its own file so logs do not fill the disk -->
            <FileNamePattern>${LOG_PATH}/info.${LOG_APPNAME}.%d{yyyy-MM-dd}.log.gz</FileNamePattern>
            <!-- keep only the last 30 days of logs -->
            <maxHistory>30</maxHistory>
            <!-- caps the total size of log files; once reached, old logs are deleted -->
            <!--<totalSizeCap>1GB</totalSizeCap>-->
        </rollingPolicy>
        <!-- log output charset and pattern -->
        <encoder>
            <charset>UTF-8</charset>
            <pattern>%d [%thread] %-5level [%X{TRACE_ID}] %logger{36} %line - %msg%n</pattern>
        </encoder>
    </appender>

    <appender name="fileErrorLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- to keep only ERROR-level logs, filter with ThresholdFilter (the default level is INFO) -->
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>Error</level>
        </filter>
        <!-- Log file name: same naming behavior as the fileInfoLog appender above. -->
        <File>${LOG_PATH}/error.${LOG_APPNAME}.log</File>
        <!-- rolling policy: roll by time (TimeBasedRollingPolicy) -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- file name pattern: archives each day's log into its own file so logs do not fill the disk -->
            <FileNamePattern>${LOG_PATH}/error.${LOG_APPNAME}.%d{yyyy-MM-dd}.log.gz</FileNamePattern>
            <!-- keep only the last 90 days of logs -->
            <maxHistory>90</maxHistory>
            <!-- caps the total size of log files; once reached, old logs are deleted -->
            <!--<totalSizeCap>1GB</totalSizeCap>-->
        </rollingPolicy>
        <!-- log output charset and pattern -->
        <encoder>
            <charset>UTF-8</charset>
            <pattern>%d [%thread] %-5level [%X{TRACE_ID}] %logger{36} - %msg%n</pattern>
        </encoder>
    </appender>

    <root level="INFO">
        <appender-ref ref="fileInfoLog"/>
        <appender-ref ref="fileErrorLog"/>
        <appender-ref ref="consoleLog"/>
    </root>

    <appender name="serviceLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Log file name: same naming behavior as the fileInfoLog appender above. -->
        <File>${LOG_PATH}/serviceinfo.${LOG_APPNAME}.log</File>
        <!-- rolling policy: roll by time (TimeBasedRollingPolicy) -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- file name pattern: archives each day's log into its own file so logs do not fill the disk -->
            <FileNamePattern>${LOG_PATH}/serviceinfo.${LOG_APPNAME}.%d{yyyy-MM-dd}.log.gz</FileNamePattern>
            <!-- keep only the last 30 days of logs -->
            <maxHistory>30</maxHistory>
            <!-- caps the total size of log files; once reached, old logs are deleted -->
            <!--<totalSizeCap>1GB</totalSizeCap>-->
        </rollingPolicy>
        <!-- log output charset and pattern -->
        <encoder>
            <charset>UTF-8</charset>
            <pattern>%d [%thread] %-5level [%X{TRACE_ID}] %logger{36} %line - %msg%n</pattern>
        </encoder>
    </appender>

    <logger name="com.tencent.supersonic" level="INFO" additivity="true">
        <appender-ref ref="serviceLog"/>
    </logger>
</configuration>
@@ -0,0 +1,22 @@
data_source:
  name: s2_stay_time_statis
  sql_query: select imp_date,page,stay_hours from s2_stay_time_statis
  identifiers:
    - name: sys_imp_date
      type: primary
      expr: imp_date
    - name: user_name
      type: primary
  dimensions:
    - name: page
      type: categorical
    - name: imp_date
      type: time
      type_params:
        is_primary: True
        time_granularity: day
  measures:
    - name: stay_hours
      agg: sum
      expr: stay_hours
      create_metric: True
launchers/semantic/src/main/resources/model/s2_uv.yaml (new file, 26 lines)
@@ -0,0 +1,26 @@
data_source:
  name: s2_pv_uv_statis
  sql_query: select imp_date,user_name from s2_pv_uv_statis
  identifiers:
    - name: sys_imp_date
      type: primary
      expr: imp_date
    - name: user_name
      type: primary
  dimensions:
    - name: page
      type: categorical
    - name: sys_imp_date
      type: time
      type_params:
        is_primary: True
        time_granularity: day
  measures:
    - name: uv
      agg: count_distinct
      expr: user_name
      create_metric: True
    - name: pv
      agg: sum
      expr: 1
      create_metric: True