62 Commits

Author SHA1 Message Date
zyclove
f7ce9480bb fix:java.io.NotSerializableException: com.tencent.supersonic.common.pojo.Order (#2121)
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-03-03 16:56:23 +08:00
jerryjzhang
f8104687cc (fix)(launcher)Fix mysql schema DDL.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-03-03 11:12:49 +08:00
jerryjzhang
6eba693982 (fix)(docker)Fix Dockerfile.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
(fix)(docker)Fix Dockerfile.
2025-03-02 21:54:17 +08:00
jerryjzhang
b871ae542a (fix)(assembly)Fix windows daemon script. 2025-03-02 18:36:02 +08:00
jerryjzhang
3ca48e1ca1 (release)Release 0.9.10.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-03-02 11:56:23 +08:00
jerryjzhang
ea7238304d (fix)(docker)Fix Dockerfile by removing apt-get install.
(fix)(docker)Fix Dockerfile.

(fix)(docker)Fix Dockerfile.
2025-03-02 11:28:27 +08:00
williamhliu
1746db53c1 (fix)(supersonic-fe) fix the issue where adding, deleting, and modifying domain and model do not automatically update data (#2116)
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-03-01 16:16:29 +08:00
jerryjzhang
90c2f8b374 (fix)(headless)Fix occasional NPE in SqlBuilder.
Some checks failed
supersonic CentOS CI / build (21) (push) Has been cancelled
supersonic mac CI / build (21) (push) Has been cancelled
supersonic ubuntu CI / build (21) (push) Has been cancelled
supersonic windows CI / build (21) (push) Has been cancelled
2025-02-28 17:11:05 +08:00
jerryjzhang
bd64bf1f62 (improvement)(headless)Optimize relationship probe in translation of multi-table join scenarios.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-27 21:53:14 +08:00
jerryjzhang
56cfddea60 (improvement)(format)Follow the code style.
Some checks failed
supersonic CentOS CI / build (21) (push) Has been cancelled
supersonic mac CI / build (21) (push) Has been cancelled
supersonic ubuntu CI / build (21) (push) Has been cancelled
supersonic windows CI / build (21) (push) Has been cancelled
2025-02-26 19:19:25 +08:00
jerryjzhang
0aa002882d (improvement)(license)Change license from MIT to Apache 2.0. 2025-02-26 19:17:06 +08:00
zyclove
5e3bafb953 feat:Support kyuubi presto trino (#2109) 2025-02-26 17:33:14 +08:00
zyclove
11ff99cdbe fix: Caused by: java.io.NotSerializableException: DateConf、SchemaNameLengthComparator、SqlEvaluation and etc. (#2110) 2025-02-26 17:32:20 +08:00
mislayming
f9198cb8e0 (improvement)(chat) Enhancing the capability of embedding with LLM-based secondary judgment. (#2096) 2025-02-24 12:55:39 +08:00
jerryjzhang
b5aa6e046e (feature)(chat)Support agent-level permission management.
Some checks failed
supersonic CentOS CI / build (21) (push) Has been cancelled
supersonic mac CI / build (21) (push) Has been cancelled
supersonic ubuntu CI / build (21) (push) Has been cancelled
supersonic windows CI / build (21) (push) Has been cancelled
2025-02-24 09:25:12 +08:00
williamhliu
29271f7278 (feature)(supersonic-fe) add permission manage in agent (#2095) 2025-02-24 08:18:48 +08:00
jerryjzhang
50ed340ae0 (fix)(headless)Fix NPE issue.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-23 14:55:56 +08:00
jerryjzhang
75f623404d (fix)(headless)Fix aggregator parsing of struct query.
Some checks failed
supersonic CentOS CI / build (21) (push) Has been cancelled
supersonic mac CI / build (21) (push) Has been cancelled
supersonic ubuntu CI / build (21) (push) Has been cancelled
supersonic windows CI / build (21) (push) Has been cancelled
2025-02-21 20:04:12 +08:00
jerryjzhang
94e853f57e [improvement][headless]Support dataSetNames that contain dash.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
[improvement][headless]Support dataSetNames that contain dash.

[improvement][headless]Support dataSetNames that contain dash.
2025-02-21 01:25:02 +08:00
jerryjzhang
5fa3607874 [fix][chat]Fix NPE issue. 2025-02-21 00:07:14 +08:00
jerryjzhang
1e01f3ef60 (improvement)(headless)Optimize metric matching in populating data format.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-20 19:06:55 +08:00
beat4ocean
1155ac10d8 [fix][project] Fix the issue of SpringDoc not working. (#2081) 2025-02-20 18:31:50 +08:00
jacktpy
5a22590661 [fix]修复快速创建模型时不能自动创建维度 (#2083) 2025-02-20 18:24:33 +08:00
jerryjzhang
fc67411618 (fix)(launcher)Fix database initialization script of mysql and psotgresql.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-20 12:31:17 +08:00
jacktpy
c03be2f5d8 [fix]修复创建模型catalog校验异常 (#2077) 2025-02-20 11:02:40 +08:00
jerryjzhang
08a2e889e7 [feature][headless]Introduce TranslatorConfig to make result limit configurable via system parameter.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-19 23:25:56 +08:00
jerryjzhang
87fa778416 [fix][launcher]Fix S2VisitsDemo to avoid broken demo. 2025-02-19 22:00:55 +08:00
jerryjzhang
b70b7ed01a (improvement)(launcher)Auto increment init user.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-19 18:11:36 +08:00
jerryjzhang
335e1f9ada (fix)(headless)Fix updating measure agg doesn't take effect. 2025-02-19 18:11:02 +08:00
Hyman_bz
33268bf3d9 feat: add support starrocks and multiple catalog (#2066) 2025-02-19 18:00:22 +08:00
beat4ocean
86b9d2013a [fix][headless-fe] Fix the issue of incorrect time sorting in charts. (#2069) 2025-02-19 17:34:31 +08:00
jerryjzhang
aced1dfd3e (fix)(launcher)Fix swagger docs. 2025-02-19 14:15:09 +08:00
jerryjzhang
d1e5e8777a (improvement)(chat)Try to find all fields in the same model to avoid unnecessary join.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-18 21:29:39 +08:00
jerryjzhang
b743585d3e (fix)(headless)Fix NPE issue. 2025-02-18 19:07:39 +08:00
weimengdalao
9aa305ca7a [fix][headless]fix Check if metricFilters is empty not dimensionFilters (#2064) 2025-02-18 12:49:43 +08:00
jerryjzhang
fe51882031 Merge remote-tracking branch 'origin/master' 2025-02-18 12:48:35 +08:00
jerryjzhang
17a3dd052c (improvement)(chat)LLM might output table or column with `` enclose, should handle with it. 2025-02-18 12:48:26 +08:00
jerryjzhang
2e71b9b892 [improvement][docker]Deprecate and remove db_init container in docker-compose.yml.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-17 22:55:22 +08:00
jerryjzhang
a067d2cace Merge remote-tracking branch 'origin/master' 2025-02-17 21:21:49 +08:00
jerryjzhang
32793ecf69 (improvement)(chat)Determine if with statement is supported and send explicitly message in the prompt to the LLM. 2025-02-17 21:21:43 +08:00
zyclove
43b96edc77 fix: Cannot find module 'antd/lib/avatar/avatar' with antd 5.24.0 (#2062)
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-17 16:17:36 +08:00
jerryjzhang
f31db98aba (improvement)(project)Introduce aibi-env.sh script to simplify user settings.
(improvement)(project)Introduce aibi-env.sh script to simplify user settings.

(improvement)(project)Introduce aibi-env.sh script to simplify user settings.
2025-02-17 14:36:15 +08:00
jerryjzhang
348d6df6a2 [fix][headless]Fix table name of SqlQuery.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-16 20:27:51 +08:00
jerryjzhang
91768892cf [fix][chat]Fix parse state when error message is returned.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-16 18:18:21 +08:00
jerryjzhang
0868a18b08 Merge branch 'master' of https://github.com/tencentmusic/supersonic 2025-02-16 15:40:13 +08:00
jerryjzhang
46316cadcf [fix][headless]Adjust none operator enum. 2025-02-16 15:38:53 +08:00
williamhliu
f804371134 (fix) 修复修改密码问题 (#2060)
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
Co-authored-by: williamhliu <williamhliu@tencent.com>
2025-02-16 10:56:51 +08:00
jerryjzhang
d6620e6ea7 [fix][headless]Adjust none operator enum. 2025-02-16 10:40:11 +08:00
jerryjzhang
cc2d6a21c2 [fix][headless]Fix NPE issue.
Some checks failed
supersonic CentOS CI / build (21) (push) Has been cancelled
supersonic mac CI / build (21) (push) Has been cancelled
supersonic ubuntu CI / build (21) (push) Has been cancelled
supersonic windows CI / build (21) (push) Has been cancelled
2025-02-14 21:32:36 +08:00
beat4ocean
19395f369a [fix][headless] Fix models cannot be deleted if related indicators/dimensions are marked as deleted. (#2056) 2025-02-14 21:31:26 +08:00
zyclove
baae7f74b8 【feat】Optimize the web app build script to include checks for the build results, preventing partial successes. (#2058) 2025-02-14 21:22:42 +08:00
zyclove
e9d9c4591d feat:add create index scripts for opensearch (#2055) 2025-02-14 21:22:01 +08:00
jerryjzhang
6cc145935d [fix][auth]Fix user registration and resetPassword issue.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-14 00:28:36 +08:00
zyclove
89e07509de 【bug】Webapp fix error TS2551: Property 'nameEn' does not exist on type 'ColumnType'. Did you mean 'name'? (#2051)
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-13 14:05:55 +08:00
zyclove
d942d35c93 feat:add opensearch (#2049)
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-12 20:30:58 +08:00
jianjun.xu
198c7c69e6 [fix][headless] The output on the front-end dimension page is abnormal after the model management is modified (#2048) 2025-02-12 20:29:17 +08:00
zyclove
cb139a54e8 feat:add openapi supports ApiVersion (#2050) 2025-02-12 20:28:10 +08:00
Hwwwww
f412ae4539 [fix][headless] Fix having and alias column not enclosed in backticks. (#2042)
Some checks failed
supersonic CentOS CI / build (21) (push) Has been cancelled
supersonic mac CI / build (21) (push) Has been cancelled
supersonic ubuntu CI / build (21) (push) Has been cancelled
supersonic windows CI / build (21) (push) Has been cancelled
2025-02-10 18:55:32 +08:00
Hwwwww
3ca46bee36 [fix][headless] Fix order by and group by not enclosed in backticks. (#2041)
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-10 12:55:05 +08:00
Hwwwww
a8157ee769 [fix][headless] Solve the problem of SQL execution error when alias is Chinese (#2039)
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-09 22:59:35 +08:00
jerryjzhang
eef7b3c443 [improvement][chat]Make a few code restructure.
Some checks failed
supersonic CentOS CI / build (21) (push) Has been cancelled
supersonic mac CI / build (21) (push) Has been cancelled
supersonic ubuntu CI / build (21) (push) Has been cancelled
supersonic windows CI / build (21) (push) Has been cancelled
2025-02-08 14:39:23 +08:00
jerryjzhang
c34b85c8a4 [improvement][chat]Introduce new chat workflow state.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-08 09:24:23 +08:00
159 changed files with 18526 additions and 13821 deletions

476
LICENSE
View File

@@ -1,481 +1,41 @@
SuperSonic is licensed under the MIT License, you can freely use or integrate SuperSonic within
your organization. However, if you want to provide or integrate SuperSonic to third parties
as a commercial software or service, you must contact the producer to obtain a commercial license.
Please contact jerryjzhang@tencent.com by email to inquire about licensing matters.
Apache License Version 2.0
As a SuperSonic contributor, you should agree that:
Copyright (2025) The SuperSonic Project Authors. All rights reserved.
a. The producer can adjust the open-source agreement to be stricter or relaxed as deemed necessary.
b. Your contributed code may be used for commercial purposes, including but not limited to its business operations.
----------
Terms of the MIT License:
--------------------------------------------------------------------
MIT License
SuperSonic is licensed under the Apache License 2.0, with the following additional conditions:
Copyright (c) 2023 Tencent Music Entertainment
1. The commercial usage of SuperSonic:
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
a. SuperSonic may be utilized commercially, including as a frontend and backend service without modifying the source
code and logo.
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
b. a commercial license must be obtained from the author if you want to develop and distribute a derivative work based
on SuperSonic.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Please contact zhangjun2915@163.com by email to inquire about licensing matters.
END OF TERMS AND CONDITIONS
2. As a contributor, you should agree that:
Other dependencies and licenses:
a. The producer can adjust the open-source agreement to be more strict or relaxed as deemed necessary.
b. Your contributed code may be used for commercial purposes, including but not limited to its cloud edition.
Open Source Software Licensed under the MIT License:
--------------------------------------------------------------------
1. Mybatis-PageHelper 1.2.10
Copyright (c) 2014-2022 abel533@gmail.com
Apart from the specific conditions mentioned above, all other rights and restrictions follow the Apache License 2.0.
Detailed information about the Apache License 2.0 can be found at http://www.apache.org/licenses/LICENSE-2.0.
2. lombok
Copyright (C) 2009-2021 The Project Lombok Authors.
----------
3. react
Copyright (c) Facebook, Inc. and its affiliates.
4. ant-design
Copyright (c) 2015-present Ant UED, https://xtech.antfin.com/
5. ant-design-pro
Copyright (c) 2019 Alipay.inc
6. @ant-design/charts
Copyright (c) 2021 Ant Design
7. @ant-design/icons
Copyright (c) 2018-present Ant UED, https://xtech.antfin.com/
8. @antv/layout
Copyright (c) 2018 Alipay.inc
9. @antv/xflow
Copyright (c) 2021-2023 Alipay.inc
10. umi
Copyright (c) 2017-present ChenCheng (sorrycc@gmail.com)
11. @umijs/route-utils
Copyright (c) 2019-present chenshuai2144 (qixian.cs@outlook.com)
12. ahooks
Copyright (c) 2020 ahooks
13. axios
Copyright (c) 2014-present Matt Zabriskie & Collaborators
14. classnames
Copyright (c) 2018 Jed Watson
15. crypto-js
Copyright (c) 2009-2013 Jeff Mott
Copyright (c) 2013-2016 Evan Vosberg
16. immutability-helper
Copyright (c) 2017 Moshe Kolodny
17. lodash
Copyright JS Foundation and other contributors <https://js.foundation/>
18. moment
Copyright (c) JS Foundation and other contributors
19. numeral
Copyright (c) 2016 Adam Draper
20. omit.js
Copyright (c) 2016 Benjy Cui
21. rc-menu
Copyright (c) 2014-present yiminghe
22. rc-util
Copyright (c) 2014-present yiminghe
Copyright (c) 2015-present Alipay.com, https://www.alipay.com/
23. react-ace
Copyright (c) 2014 James Hrisho
24. react-dev-inspector
Copyright (c) zthxxx (https://blog.zthxxx.me)
25. react-lazyload
Copyright (c) 2015 Sen Yang
26. react-spinners
Copyright (c) 2017 David Hu
27.react-split-pane
Copyright (c) 2015 tomkp
28. snappyjs
Copyright (c) 2016 Zhipeng Jia
29. sql-formatter
Copyright (c) 2016-2020 ZeroTurnaround LLC
Copyright (c) 2020-2021 George Leslie-Waksman and other contributors
Copyright (c) 2021-Present inferrinizzard and other contributors
30. @ant-design/pro-cli
Copyright (c) 2017-2018 Alipay
31. cross-env
Copyright (c) 2017 Kent C. Dodds
32.cross-port-killer
Copyright (c) 2017 Rafael Milewski
33.detect-installer
Copyright (c) 2019-present chenshuai2144 (qixian.cs@outlook.com)
34.eslint
Copyright OpenJS Foundation and other contributors, <www.openjsf.org>
35.express
Copyright (c) 2009-2014 TJ Holowaychuk <tj@vision-media.ca>
Copyright (c) 2013-2014 Roman Shtylman <shtylman+expressjs@gmail.com>
Copyright (c) 2014-2015 Douglas Christopher Wilson <doug@somethingdoug.com>
36.gh-pages
Copyright (c) 2014 Tim Schaub
37.inflect
Copyright (C) 2020 Pavan Kumar Sunkara
38.lint-staged
Copyright (c) 2016 Andrey Okonetchnikov
39.prettier
Copyright © James Long and contributors
40.stylelint
Copyright (c) 2015 - present Maxime Thirouin, David Clark & Richard Hallows
41.umi-serve
Copyright (c) 2017-present ChenCheng (sorrycc@gmail.com)
42.webpack
Copyright JS Foundation and other contributors
43.react-dnd
Copyright (c) 2015 Dan Abramov
44.react-grid-layout
Copyright (c) 2016 Samuel Reed
45. slate
Copyright © 2016–2023, Ian Storm Taylor
46.html2canvas
Copyright (c) 2012 Niklas von Hertzen
47.core-js
Copyright (c) 2014-2020 Denis Pushkarev
48.immer 4.0.2
Copyright (c) 2017 Michel Weststrate
49.redux
Copyright (c) 2015-present Dan Abramov
The Redux logo is dedicated to the public domain and licensed under CC0.
50.redux-saga
Copyright (c) 2015 Yassine Elouafi
The Redux-Saga logo is dedicated to the public domain and licensed under CC0.
51.ts-loader
Copyright (c) 2015 TypeStrong
52.minimist
Files: https://github.com/minimistjs/minimist/tree/v1.2.3
License Details: https://github.com/minimistjs/minimist/blob/main/LICENSE
53.intl
copyright (c) 2013 Andy Earnshaw
A copy of the MIT License is included in this file.
Open Source Software Licensed under the Apache License Version 2.0:
--------------------------------------------------------------------
1. HanLP
Files: https://github.com/hankcs/HanLP/tree/v1.8.3
License Details: https://github.com/hankcs/HanLP/blob/v1.8.3/LICENSE
2. mybatis
iBATIS
This product includes software developed by
The Apache Software Foundation (http://www.apache.org/).
Copyright 2010 The Apache Software Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
3. guava
Files: https://github.com/google/guava/tree/v20.0
License Details: https://github.com/google/guava/blob/master/LICENSE
4. hadoop
This product includes software developed by The Apache Software Foundation (http://www.apache.org/).
5. Jackson
Files: https://github.com/FasterXML/jackson-core/tree/2.11
License Details: https://github.com/FasterXML/jackson-core/blob/2.11/LICENSE
6. commons-lang
Apache Commons Lang
Copyright 2001-2017 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).
This product includes software from the Spring Framework,
under the Apache License 2.0 (see: StringUtils.containsWhitespace())
7. testng
Files: https://github.com/testng-team/testng/tree/6.13.1
License Details: https://github.com/testng-team/testng/blob/6.13.1/LICENSE.txt
8. jackson-dataformat-yaml
Files: https://github.com/FasterXML/jackson-dataformat-yaml/tree/jackson-dataformat-yaml-2.8.11
License Details: https://www.apache.org/licenses/LICENSE-2.0.txt
9. druid
Copyright 1999-2018 Alibaba Group Holding Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
10. davinci
Licensed to Apereo under one or more contributor license
agreements. See the NOTICE file distributed with this work
for additional information regarding copyright ownership.
Apereo licenses this file to you under the Apache License,
Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a
copy of the License at the following location:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
11. echarts
Apache ECharts
Copyright 2017-2023 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (https://www.apache.org/).
12. echarts-wordcloud
Apache ECharts
Copyright 2017-2023 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (https://www.apache.org/).
13. carlo
Files: https://github.com/GoogleChromeLabs/carlo
License Details: https://github.com/GoogleChromeLabs/carlo/blob/master/LICENSE
14. puppeteer-core
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
15. swagger-ui-react
swagger-ui
Copyright 2020-2021 SmartBear Software Inc.
16. typescript
Files: https://github.com/microsoft/TypeScript
License Details: https://github.com/microsoft/TypeScript/blob/main/LICENSE.txt
17. io.jsonwebtoken
Copyright (C) 2014 jsonwebtoken.io
Files: https://repo1.maven.org/maven2/io/jsonwebtoken/jjwt/0.9.1/jjwt-0.9.1.jar
Terms of the Apache License Version 2.0:
--------------------------------------------------------------------
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
You must give any other recipients of the Work or Derivative Works a copy of this License; and
You must cause any modified files to carry prominent notices stating that You changed the files; and
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
Open Source Software Licensed under the Modified BSD License:
--------------------------------------------------------------------
1. node-sha1
Copyright © 2009, Jeff Mott. All rights reserved.
Copyright © 2011, Paul Vorbach. All rights reserved.
This project is licensed under the terms of the Modified BSD License, as follows:
-------------------------------------------------------------------
Copyright (c) 2005-2023, NumPy Developers.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of Crypto-JS nor the names of any contributors
may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
2. ace-builds
Copyright (c) 2010, Ajax.org B.V.
All rights reserved.
This project is licensed under the terms of the Modified BSD License, as follows:
-------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of Ajax.org B.V. nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Other Open Source Software
--------------------------------------------------------------------
1. jsencrypt
Files: https://github.com/travist/jsencrypt
License Details: https://github.com/travist/jsencrypt/blob/master/LICENSE.txt
limitations under the License.

View File

@@ -35,12 +35,17 @@ function buildWebapp {
chmod +x $projectDir/webapp/start-fe-prod.sh
cd $projectDir/webapp
sh ./start-fe-prod.sh
cp -fr ./supersonic-webapp.tar.gz ${buildDir}/
# check build result
if [ $? -ne 0 ]; then
echo "Failed to build frontend webapp."
exit 1
fi
cp -fr ./supersonic-webapp.tar.gz ${buildDir}/
# check build result
if [ $? -ne 0 ]; then
echo "Failed to get supersonic webapp package."
exit 1
fi
echo "finished building supersonic webapp"
}
@@ -56,6 +61,11 @@ function packageRelease {
# package webapp
tar xvf supersonic-webapp.tar.gz
mv supersonic-webapp webapp
# check webapp build result
if [ $? -ne 0 ]; then
echo "Failed to get supersonic webapp package."
exit 1
fi
json='{"env": "''"}'
echo $json > webapp/supersonic.config.json
mv webapp $release_dir/

View File

@@ -4,6 +4,7 @@ chcp 65001
set "sbinDir=%~dp0"
call %sbinDir%/supersonic-common.bat %*
call %sbinDir%/supersonic-env.bat %*
set "command=%~1"
set "service=%~2"
@@ -14,7 +15,7 @@ if "%service%"=="" (
)
if "%profile%"=="" (
set "profile=local"
set "profile=%S2_DB_TYPE%"
)
set "model_name=%service%"
@@ -54,7 +55,8 @@ if "%command%"=="restart" (
set "webDir=%baseDir%\webapp"
set "logDir=%baseDir%\logs"
set "classpath=%baseDir%;%webDir%;%libDir%\*;%confDir%"
set "java-command=-Dfile.encoding=UTF-8 -Duser.language=Zh -Duser.region=CN -Duser.timezone=GMT+08 -Dspring.profiles.active=%profile% -Xms1024m -Xmx1024m -cp %CLASSPATH% %MAIN_CLASS%"
set "property=-Dfile.encoding=UTF-8 -Duser.language=Zh -Duser.region=CN -Duser.timezone=GMT+08 -Dspring.profiles.active=%profile%"
set "java-command=%property% -Xms1024m -Xmx1024m -cp %CLASSPATH% %MAIN_CLASS%"
if not exist %logDir% mkdir %logDir%
start /B java %java-command% >nul 2>&1
timeout /t 10 >nul

View File

@@ -2,6 +2,7 @@
sbinDir=$(cd "$(dirname "$0")"; pwd)
source $sbinDir/supersonic-common.sh
source $sbinDir/supersonic-env.sh
command=$1
service=$2
@@ -12,7 +13,7 @@ if [ -z "$service" ]; then
fi
if [ -z "$profile" ]; then
profile="local"
profile=${S2_DB_TYPE}
fi
model_name=$service

View File

@@ -0,0 +1,8 @@
:: Set below DB configs to connect to your own database
:: Supported DB_TYPE: h2, mysql, postgres
set "S2_DB_TYPE=h2"
set "S2_DB_HOST="
set "S2_DB_PORT="
set "S2_DB_USER="
set "S2_DB_PASSWORD="
set "S2_DB_DATABASE="

10
assembly/bin/supersonic-env.sh Executable file
View File

@@ -0,0 +1,10 @@
#!/usr/bin/env bash
#### Set below DB configs to connect to your own database
# Supported DB_TYPE: h2, mysql, postgres
export S2_DB_TYPE=h2
export S2_DB_HOST=
export S2_DB_PORT=
export S2_DB_USER=
export S2_DB_PASSWORD=
export S2_DB_DATABASE=

View File

@@ -12,4 +12,7 @@ public class UserReq {
@NotBlank(message = "password can not be null")
private String password;
@NotBlank(message = "password can not be null")
private String newPassword;
}

View File

@@ -1,7 +1,13 @@
package com.tencent.supersonic.auth.authentication.persistence.dataobject;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
@TableName("s2_user")
public class UserDO {
/** */
@TableId(type = IdType.AUTO)
private Long id;
/** */

View File

@@ -1,5 +1,7 @@
package com.tencent.supersonic.auth.authentication.persistence.mapper;
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.tencent.supersonic.auth.authentication.persistence.dataobject.UserDO;
import com.tencent.supersonic.auth.authentication.persistence.dataobject.UserDOExample;
import org.apache.ibatis.annotations.Mapper;
@@ -7,12 +9,8 @@ import org.apache.ibatis.annotations.Mapper;
import java.util.List;
@Mapper
public interface UserDOMapper {
public interface UserDOMapper extends BaseMapper<UserDO> {
/** @mbg.generated */
int insert(UserDO record);
/** @mbg.generated */
List<UserDO> selectByExample(UserDOExample example);
void updateByPrimaryKey(UserDO userDO);

View File

@@ -72,6 +72,13 @@ public class UserController {
return userService.login(userCmd, request);
}
@PostMapping("/resetPassword")
public void resetPassword(@RequestBody UserReq userCmd, HttpServletRequest request,
HttpServletResponse response) {
User user = userService.getCurrentUser(request, response);
userService.resetPassword(user.getName(), userCmd.getPassword(), userCmd.getNewPassword());
}
@PostMapping("/generateToken")
public UserToken generateToken(@RequestBody UserTokenReq userTokenReq,
HttpServletRequest request, HttpServletResponse response) {

View File

@@ -59,14 +59,6 @@
limit #{limitStart} , #{limitEnd}
</if>
</select>
<insert id="insert" parameterType="com.tencent.supersonic.auth.authentication.persistence.dataobject.UserDO">
insert into s2_user (id, name, password, salt,
display_name, email, is_admin
)
values (#{id,jdbcType=BIGINT}, #{name,jdbcType=VARCHAR}, #{password,jdbcType=VARCHAR}, #{salt,jdbcType=VARCHAR},
#{displayName,jdbcType=VARCHAR}, #{email,jdbcType=VARCHAR}, #{isAdmin,jdbcType=INTEGER}
)
</insert>
<insert id="insertSelective" parameterType="com.tencent.supersonic.auth.authentication.persistence.dataobject.UserDO">
insert into s2_user
<trim prefix="(" suffix=")" suffixOverrides=",">

View File

@@ -38,6 +38,9 @@ public class Agent extends RecordInfo {
private VisualConfig visualConfig;
private List<String> admins = Lists.newArrayList();
private List<String> viewers = Lists.newArrayList();
private List<String> adminOrgs = Lists.newArrayList();
private List<String> viewOrgs = Lists.newArrayList();
private Integer isOpen = 0;
public List<String> getTools(AgentToolType type) {
Map<String, Object> map = JSONObject.parseObject(toolConfig, Map.class);
@@ -115,4 +118,8 @@ public class Agent extends RecordInfo {
return list.apply(this).contains(user.getName());
}
public boolean openToAll() {
return isOpen != null && isOpen == 1;
}
}

View File

@@ -44,4 +44,10 @@ public class AgentDO {
private String admin;
private String viewer;
private String adminOrg;
private String viewOrg;
private Integer isOpen;
}

View File

@@ -4,6 +4,7 @@ import com.tencent.supersonic.chat.server.pojo.ParseContext;
import com.tencent.supersonic.common.pojo.ChatApp;
import com.tencent.supersonic.common.pojo.enums.AppModule;
import com.tencent.supersonic.common.util.ChatAppManager;
import com.tencent.supersonic.headless.api.pojo.response.ParseResp;
import com.tencent.supersonic.headless.server.utils.ModelConfigHelper;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
@@ -26,7 +27,7 @@ public class ErrorMsgRewriteProcessor implements ParseResultProcessor {
private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
public static final String APP_KEY_ERROR_MESSAGE = "REWRITE_ERROR_MESSAGE";
public static final String APP_KEY = "REWRITE_ERROR_MESSAGE";
private static final String REWRITE_ERROR_MESSAGE_INSTRUCTION = ""
+ "#Role: You are a data business partner who closely interacts with business people.\n"
+ "#Task: Your will be provided with user input, system output and some examples, "
@@ -37,7 +38,7 @@ public class ErrorMsgRewriteProcessor implements ParseResultProcessor {
+ "#Examples: {{examples}}\n" + "#Response: ";
public ErrorMsgRewriteProcessor() {
ChatAppManager.register(APP_KEY_ERROR_MESSAGE,
ChatAppManager.register(APP_KEY,
ChatApp.builder().prompt(REWRITE_ERROR_MESSAGE_INSTRUCTION).name("异常提示改写")
.appModule(AppModule.CHAT).description("通过大模型将异常信息改写为更友好和引导性的提示用语")
.enable(true).build());
@@ -45,7 +46,7 @@ public class ErrorMsgRewriteProcessor implements ParseResultProcessor {
@Override
public boolean accept(ParseContext parseContext) {
ChatApp chatApp = parseContext.getAgent().getChatAppConfig().get(APP_KEY_ERROR_MESSAGE);
ChatApp chatApp = parseContext.getAgent().getChatAppConfig().get(APP_KEY);
return StringUtils.isNotBlank(parseContext.getResponse().getErrorMsg())
&& Objects.nonNull(chatApp) && chatApp.isEnable();
}
@@ -53,16 +54,20 @@ public class ErrorMsgRewriteProcessor implements ParseResultProcessor {
@Override
public void process(ParseContext parseContext) {
String errMsg = parseContext.getResponse().getErrorMsg();
ChatApp chatApp = parseContext.getAgent().getChatAppConfig().get(APP_KEY_ERROR_MESSAGE);
ChatApp chatApp = parseContext.getAgent().getChatAppConfig().get(APP_KEY);
Map<String, Object> variables = new HashMap<>();
variables.put("user_question", parseContext.getRequest().getQueryText());
variables.put("system_message", errMsg);
StringBuilder exampleStr = new StringBuilder();
parseContext.getResponse().getUsedExemplars().forEach(e -> exampleStr.append(
String.format("<Question:{%s},Schema:{%s}> ", e.getQuestion(), e.getDbSchema())));
parseContext.getAgent().getExamples()
.forEach(e -> exampleStr.append(String.format("<Question:{%s}> ", e)));
if (parseContext.getResponse().getUsedExemplars() != null) {
parseContext.getResponse().getUsedExemplars().forEach(e -> exampleStr.append(String
.format("<Question:{%s},Schema:{%s}> ", e.getQuestion(), e.getDbSchema())));
}
if (parseContext.getAgent().getExamples() != null) {
parseContext.getAgent().getExamples()
.forEach(e -> exampleStr.append(String.format("<Question:{%s}> ", e)));
}
variables.put("examples", exampleStr);
Prompt prompt = PromptTemplate.from(chatApp.getPrompt()).apply(variables);
@@ -71,6 +76,7 @@ public class ErrorMsgRewriteProcessor implements ParseResultProcessor {
Response<AiMessage> response = chatLanguageModel.generate(prompt.toUserMessage());
String rewrittenMsg = response.content().text();
parseContext.getResponse().setErrorMsg(rewrittenMsg);
parseContext.getResponse().setState(ParseResp.ParseState.FAILED);
keyPipelineLog.info("ErrorMessageProcessor modelReq:\n{} \nmodelResp:\n{}", prompt.text(),
rewrittenMsg);
}

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.chat.server.service.impl;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.tencent.supersonic.auth.api.authentication.service.UserService;
import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryFilter;
import com.tencent.supersonic.chat.api.pojo.request.ChatParseReq;
import com.tencent.supersonic.chat.server.agent.Agent;
@@ -26,6 +27,7 @@ import org.springframework.util.CollectionUtils;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.stream.Collectors;
@@ -42,6 +44,9 @@ public class AgentServiceImpl extends ServiceImpl<AgentDOMapper, AgentDO> implem
@Autowired
private ChatModelService chatModelService;
@Autowired
private UserService userService;
@Autowired
@Qualifier("chatExecutor")
private ThreadPoolExecutor executor;
@@ -53,17 +58,19 @@ public class AgentServiceImpl extends ServiceImpl<AgentDOMapper, AgentDO> implem
}
private boolean filterByAuth(Agent agent, User user, AuthType authType) {
if (user.isSuperAdmin() || user.getName().equals(agent.getCreatedBy())) {
Set<String> orgIds = userService.getUserAllOrgId(user.getName());
if (user.isSuperAdmin() || agent.openToAll()
|| user.getName().equals(agent.getCreatedBy())) {
return true;
}
authType = authType == null ? AuthType.VIEWER : authType;
switch (authType) {
case ADMIN:
return agent.contains(user, Agent::getAdmins);
return checkAdminPermission(orgIds, user, agent);
case VIEWER:
default:
return agent.contains(user, Agent::getAdmins)
|| agent.contains(user, Agent::getViewers);
return checkViewPermission(orgIds, user, agent);
}
}
@@ -161,6 +168,9 @@ public class AgentServiceImpl extends ServiceImpl<AgentDOMapper, AgentDO> implem
});
agent.setAdmins(JsonUtil.toList(agentDO.getAdmin(), String.class));
agent.setViewers(JsonUtil.toList(agentDO.getViewer(), String.class));
agent.setAdminOrgs(JsonUtil.toList(agentDO.getAdminOrg(), String.class));
agent.setViewOrgs(JsonUtil.toList(agentDO.getViewOrg(), String.class));
agent.setIsOpen(agentDO.getIsOpen());
return agent;
}
@@ -173,9 +183,56 @@ public class AgentServiceImpl extends ServiceImpl<AgentDOMapper, AgentDO> implem
agentDO.setVisualConfig(JsonUtil.toString(agent.getVisualConfig()));
agentDO.setAdmin(JsonUtil.toString(agent.getAdmins()));
agentDO.setViewer(JsonUtil.toString(agent.getViewers()));
agentDO.setAdminOrg(JsonUtil.toString(agent.getAdminOrgs()));
agentDO.setViewOrg(JsonUtil.toString(agent.getViewOrgs()));
agentDO.setIsOpen(agent.getIsOpen());
if (agentDO.getStatus() == null) {
agentDO.setStatus(1);
}
return agentDO;
}
private boolean checkAdminPermission(Set<String> orgIds, User user, Agent agent) {
List<String> admins = agent.getAdmins();
List<String> adminOrgs = agent.getAdminOrgs();
if (user.isSuperAdmin()) {
return true;
}
if (admins.contains(user.getName()) || agent.getCreatedBy().equals(user.getName())) {
return true;
}
if (CollectionUtils.isEmpty(adminOrgs)) {
return false;
}
for (String orgId : orgIds) {
if (adminOrgs.contains(orgId)) {
return true;
}
}
return false;
}
private boolean checkViewPermission(Set<String> orgIds, User user, Agent agent) {
if (checkAdminPermission(orgIds, user, agent)) {
return true;
}
List<String> viewers = agent.getViewers();
List<String> viewOrgs = agent.getViewOrgs();
if (agent.openToAll()) {
return true;
}
if (viewers.contains(user.getName())) {
return true;
}
if (CollectionUtils.isEmpty(viewOrgs)) {
return false;
}
for (String orgId : orgIds) {
if (viewOrgs.contains(orgId)) {
return true;
}
}
return false;
}
}

View File

@@ -174,6 +174,10 @@
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-milvus</artifactId>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-opensearch</artifactId>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-pgvector</artifactId>
@@ -242,6 +246,10 @@
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.woodstox</groupId>

View File

@@ -17,9 +17,10 @@ import java.util.List;
public class EmbeddingStoreParameterConfig extends ParameterConfig {
private static final String MODULE_NAME = "向量数据库配置";
public static final Parameter EMBEDDING_STORE_PROVIDER = new Parameter(
"s2.embedding.store.provider", EmbeddingStoreType.IN_MEMORY.name(), "向量库类型",
"目前支持四种类型IN_MEMORY、MILVUS、CHROMA、PGVECTOR", "list", MODULE_NAME, getCandidateValues());
public static final Parameter EMBEDDING_STORE_PROVIDER =
new Parameter("s2.embedding.store.provider", EmbeddingStoreType.IN_MEMORY.name(),
"向量库类型", "目前支持四种类型IN_MEMORY、MILVUS、CHROMA、PGVECTOR、OPENSEARCH", "list",
MODULE_NAME, getCandidateValues());
public static final Parameter EMBEDDING_STORE_BASE_URL =
new Parameter("s2.embedding.store.base.url", "", "BaseUrl", "", "string", MODULE_NAME,
@@ -87,16 +88,18 @@ public class EmbeddingStoreParameterConfig extends ParameterConfig {
private static ArrayList<String> getCandidateValues() {
return Lists.newArrayList(EmbeddingStoreType.IN_MEMORY.name(),
EmbeddingStoreType.MILVUS.name(), EmbeddingStoreType.CHROMA.name(),
EmbeddingStoreType.PGVECTOR.name());
EmbeddingStoreType.PGVECTOR.name(), EmbeddingStoreType.OPENSEARCH.name());
}
private static List<Parameter.Dependency> getBaseUrlDependency() {
return getDependency(EMBEDDING_STORE_PROVIDER.getName(),
Lists.newArrayList(EmbeddingStoreType.MILVUS.name(),
EmbeddingStoreType.CHROMA.name(), EmbeddingStoreType.PGVECTOR.name()),
EmbeddingStoreType.CHROMA.name(), EmbeddingStoreType.PGVECTOR.name(),
EmbeddingStoreType.OPENSEARCH.name()),
ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), "http://localhost:19530",
EmbeddingStoreType.CHROMA.name(), "http://localhost:8000",
EmbeddingStoreType.PGVECTOR.name(), "127.0.0.1"));
EmbeddingStoreType.PGVECTOR.name(), "127.0.0.1",
EmbeddingStoreType.OPENSEARCH.name(), "http://localhost:9200"));
}
private static List<Parameter.Dependency> getApiKeyDependency() {
@@ -114,17 +117,19 @@ public class EmbeddingStoreParameterConfig extends ParameterConfig {
private static List<Parameter.Dependency> getDimensionDependency() {
return getDependency(EMBEDDING_STORE_PROVIDER.getName(),
Lists.newArrayList(EmbeddingStoreType.MILVUS.name(),
EmbeddingStoreType.PGVECTOR.name()),
EmbeddingStoreType.PGVECTOR.name(), EmbeddingStoreType.OPENSEARCH.name()),
ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), "384",
EmbeddingStoreType.PGVECTOR.name(), "512"));
EmbeddingStoreType.PGVECTOR.name(), "512",
EmbeddingStoreType.OPENSEARCH.name(), "512"));
}
private static List<Parameter.Dependency> getDatabaseNameDependency() {
return getDependency(EMBEDDING_STORE_PROVIDER.getName(),
Lists.newArrayList(EmbeddingStoreType.MILVUS.name(),
EmbeddingStoreType.PGVECTOR.name()),
EmbeddingStoreType.PGVECTOR.name(), EmbeddingStoreType.OPENSEARCH.name()),
ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), "",
EmbeddingStoreType.PGVECTOR.name(), "postgres"));
EmbeddingStoreType.PGVECTOR.name(), "postgres",
EmbeddingStoreType.OPENSEARCH.name(), "ai_sql"));
}
private static List<Parameter.Dependency> getPortDependency() {
@@ -136,16 +141,18 @@ public class EmbeddingStoreParameterConfig extends ParameterConfig {
private static List<Parameter.Dependency> getUserDependency() {
return getDependency(EMBEDDING_STORE_PROVIDER.getName(),
Lists.newArrayList(EmbeddingStoreType.MILVUS.name(),
EmbeddingStoreType.PGVECTOR.name()),
EmbeddingStoreType.PGVECTOR.name(), EmbeddingStoreType.OPENSEARCH.name()),
ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), "milvus",
EmbeddingStoreType.PGVECTOR.name(), "postgres"));
EmbeddingStoreType.PGVECTOR.name(), "postgres",
EmbeddingStoreType.OPENSEARCH.name(), "opensearch"));
}
private static List<Parameter.Dependency> getPasswordDependency() {
return getDependency(EMBEDDING_STORE_PROVIDER.getName(),
Lists.newArrayList(EmbeddingStoreType.MILVUS.name(),
EmbeddingStoreType.PGVECTOR.name()),
EmbeddingStoreType.PGVECTOR.name(), EmbeddingStoreType.OPENSEARCH.name()),
ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), "milvus",
EmbeddingStoreType.PGVECTOR.name(), "postgres"));
EmbeddingStoreType.PGVECTOR.name(), "postgres",
EmbeddingStoreType.OPENSEARCH.name(), "opensearch"));
}
}

View File

@@ -0,0 +1,43 @@
package com.tencent.supersonic.common.jsqlparser;
import net.sf.jsqlparser.expression.Alias;
import net.sf.jsqlparser.statement.select.SelectItem;
import net.sf.jsqlparser.statement.select.SelectItemVisitorAdapter;
import org.apache.commons.lang3.StringUtils;
import java.util.HashMap;
import java.util.Map;
/**
 * SelectItem visitor that wraps each field alias in backticks (e.g. alias -> `alias`)
 * and records every replacement it makes, so callers can later rewrite references
 * to the same aliases (e.g. in ORDER BY / GROUP BY clauses) consistently.
 */
public class FieldAliasReplaceWithBackticksVisitor extends SelectItemVisitorAdapter {
// Maps original alias name -> backtick-quoted alias name for each replaced item.
private Map<String, String> fieldAliasReplacedMap = new HashMap<>();
@Override
public void visit(SelectItem selectExpressionItem) {
Alias alias = selectExpressionItem.getAlias();
// A select item without an explicit alias needs no quoting.
if (alias == null) {
return;
}
String aliasName = alias.getName();
String replaceValue = addBackticks(aliasName);
// Blank result means the alias was blank or already quoted; leave it untouched.
if (StringUtils.isBlank(replaceValue)) {
return;
}
alias.setName(replaceValue);
fieldAliasReplacedMap.put(aliasName, replaceValue);
}
// Returns the alias wrapped in backticks, or "" when the alias is blank or is
// already fully backtick-quoted (signalling that no replacement is needed).
private String addBackticks(String aliasName) {
if (StringUtils.isBlank(aliasName)) {
return "";
}
if (aliasName.startsWith("`") && aliasName.endsWith("`")) {
return "";
}
return "`" + aliasName + "`";
}
/** @return map of original alias -> backticked alias for all replacements performed. */
public Map<String, String> getFieldAliasReplacedMap() {
return fieldAliasReplacedMap;
}
}

View File

@@ -49,6 +49,10 @@ public class QueryExpressionReplaceVisitor extends ExpressionVisitorAdapter {
visitBinaryExpression(binaryExpression);
}
if (expression instanceof Parenthesis) {
replace(expression, fieldExprMap);
}
if (!toReplace.isEmpty()) {
Expression toReplaceExpr = getExpression(toReplace);
if (Objects.nonNull(toReplaceExpr)) {

View File

@@ -38,6 +38,10 @@ public class SqlReplaceHelper {
private final static double replaceColumnThreshold = 0.4;
public static String escapeTableName(String table) {
return String.format("`%s`", table);
}
public static String replaceAggFields(String sql,
Map<String, Pair<String, String>> fieldNameToAggMap) {
Select selectStatement = SqlSelectHelper.getSelect(sql);
@@ -486,6 +490,52 @@ public class SqlReplaceHelper {
return selectStatement.toString();
}
public static String replaceAliasWithBackticks(String sql) {
Select selectStatement = SqlSelectHelper.getSelect(sql);
if (!(selectStatement instanceof PlainSelect)) {
return sql;
}
PlainSelect plainSelect = (PlainSelect) selectStatement;
FieldAliasReplaceWithBackticksVisitor visitor = new FieldAliasReplaceWithBackticksVisitor();
for (SelectItem selectItem : plainSelect.getSelectItems()) {
selectItem.accept(visitor);
}
// Replace `order by` and `group by`
// Get the map of field aliases that have been replaced
Map<String, String> aliasReplacedMap = visitor.getFieldAliasReplacedMap();
// If no aliases have been replaced, return the original SQL statement as a string
if (aliasReplacedMap.isEmpty()) {
return selectStatement.toString();
}
// Order by elements
List<OrderByElement> orderByElements = selectStatement.getOrderByElements();
if (!CollectionUtils.isEmpty(orderByElements)) {
for (OrderByElement orderByElement : orderByElements) {
orderByElement.accept(new OrderByReplaceVisitor(aliasReplacedMap, true));
}
}
// Group by elements
GroupByElement groupByElement = plainSelect.getGroupBy();
if (Objects.nonNull(groupByElement)) {
groupByElement.accept(new GroupByReplaceVisitor(aliasReplacedMap, true));
}
// Alias columns
for (SelectItem<?> selectItem : plainSelect.getSelectItems()) {
if (selectItem.getExpression() instanceof Column) {
replaceColumn((Column) selectItem.getExpression(), aliasReplacedMap, true);
}
}
// Having
Expression having = plainSelect.getHaving();
if (Objects.nonNull(having)) {
ExpressionReplaceVisitor expressionReplaceVisitor =
new ExpressionReplaceVisitor(aliasReplacedMap);
having.accept(expressionReplaceVisitor);
}
return selectStatement.toString();
}
public static String replaceAlias(String sql) {
Select selectStatement = SqlSelectHelper.getSelect(sql);
if (!(selectStatement instanceof PlainSelect)) {

View File

@@ -228,7 +228,7 @@ public class SqlSelectHelper {
statement = CCJSqlParserUtil.parse(sql);
} catch (JSQLParserException e) {
log.error("parse error, sql:{}", sql, e);
return null;
throw new RuntimeException(e);
}
if (statement instanceof ParenthesedSelect) {
@@ -294,7 +294,8 @@ public class SqlSelectHelper {
}
// do not account in aliases
results.removeAll(aliases);
return new ArrayList<>(results);
return new ArrayList<>(
results.stream().map(r -> r.replaceAll("`", "")).collect(Collectors.toList()));
}
private static List<String> getFieldsByPlainSelect(PlainSelect plainSelect) {

View File

@@ -18,6 +18,7 @@ public class ChatModelConfig implements Serializable {
private String baseUrl;
private String apiKey;
private String modelName;
private String apiVersion;
private Double temperature = 0.0d;
private Long timeOut = 60L;
private String endpoint;

View File

@@ -34,6 +34,9 @@ public class ChatModelParameters {
public static final Parameter CHAT_MODEL_API_KEY = new Parameter("apiKey", "", "ApiKey", "",
"password", MODULE_NAME, null, getApiKeyDependency());
public static final Parameter CHAT_MODEL_API_VERSION = new Parameter("apiVersion", "2024-02-01",
"ApiVersion", "", "string", MODULE_NAME, null, getApiVersionDependency());
public static final Parameter CHAT_MODEL_ENDPOINT = new Parameter("endpoint", "llama_2_70b",
"Endpoint", "", "string", MODULE_NAME, null, getEndpointDependency());
@@ -51,7 +54,7 @@ public class ChatModelParameters {
public static List<Parameter> getParameters() {
return Lists.newArrayList(CHAT_MODEL_PROVIDER, CHAT_MODEL_BASE_URL, CHAT_MODEL_ENDPOINT,
CHAT_MODEL_API_KEY, CHAT_MODEL_SECRET_KEY, CHAT_MODEL_NAME,
CHAT_MODEL_API_KEY, CHAT_MODEL_SECRET_KEY, CHAT_MODEL_NAME, CHAT_MODEL_API_VERSION,
CHAT_MODEL_ENABLE_SEARCH, CHAT_MODEL_TEMPERATURE, CHAT_MODEL_TIMEOUT);
}
@@ -90,6 +93,12 @@ public class ChatModelParameters {
ModelProvider.DEMO_CHAT_MODEL.getApiKey()));
}
private static List<Parameter.Dependency> getApiVersionDependency() {
return getDependency(CHAT_MODEL_PROVIDER.getName(),
Lists.newArrayList(OpenAiModelFactory.PROVIDER), ImmutableMap
.of(OpenAiModelFactory.PROVIDER, OpenAiModelFactory.DEFAULT_API_VERSION));
}
private static List<Parameter.Dependency> getModelNameDependency() {
return getDependency(CHAT_MODEL_PROVIDER.getName(), getCandidateValues(),
ImmutableMap.of(OpenAiModelFactory.PROVIDER, OpenAiModelFactory.DEFAULT_MODEL_NAME,

View File

@@ -5,6 +5,7 @@ import com.tencent.supersonic.common.util.DateUtils;
import lombok.Data;
import org.springframework.util.CollectionUtils;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
@@ -12,7 +13,7 @@ import java.util.Objects;
import static java.time.LocalDate.now;
@Data
public class DateConf {
public class DateConf implements Serializable {
private static final long serialVersionUID = 3074129990945004340L;

View File

@@ -3,11 +3,13 @@ package com.tencent.supersonic.common.pojo;
import com.google.common.base.Objects;
import jakarta.validation.constraints.NotBlank;
import lombok.Data;
import java.io.Serializable;
import static com.tencent.supersonic.common.pojo.Constants.ASC_UPPER;
@Data
public class Order {
public class Order implements Serializable {
private static final long serialVersionUID = 1L;
@NotBlank(message = "Invalid order column")
private String column;

View File

@@ -2,8 +2,11 @@ package com.tencent.supersonic.common.pojo;
import lombok.Data;
import java.io.Serializable;
@Data
public class PageBaseReq {
public class PageBaseReq implements Serializable {
private static final long serialVersionUID = 1L;
private static final Integer MAX_PAGESIZE = 100;
private Integer current = 1;

View File

@@ -1,7 +1,7 @@
package com.tencent.supersonic.common.pojo.enums;
public enum AggOperatorEnum {
ANY("ANY"),
NONE(""),
MAX("MAX"),

View File

@@ -9,7 +9,11 @@ public enum EngineType {
POSTGRESQL(6, "POSTGRESQL"),
OTHER(7, "OTHER"),
DUCKDB(8, "DUCKDB"),
HANADB(9, "HANADB");
HANADB(9, "HANADB"),
STARROCKS(10, "STARROCKS"),
KYUUBI(11, "KYUUBI"),
PRESTO(12, "PRESTO"),
TRINO(13, "TRINO"),;
private Integer code;

View File

@@ -70,6 +70,7 @@ public class OpenAiChatModel implements ChatLanguageModel, TokenCountEstimator {
private final OpenAiClient client;
private final String baseUrl;
private final String modelName;
private final String apiVersion;
private final Double temperature;
private final Double topP;
private final List<String> stop;
@@ -89,12 +90,13 @@ public class OpenAiChatModel implements ChatLanguageModel, TokenCountEstimator {
@Builder
public OpenAiChatModel(String baseUrl, String apiKey, String organizationId, String modelName,
Double temperature, Double topP, List<String> stop, Integer maxTokens,
Double presencePenalty, Double frequencyPenalty, Map<String, Integer> logitBias,
String responseFormat, Boolean strictJsonSchema, Integer seed, String user,
Boolean strictTools, Boolean parallelToolCalls, Duration timeout, Integer maxRetries,
Proxy proxy, Boolean logRequests, Boolean logResponses, Tokenizer tokenizer,
Map<String, String> customHeaders, List<ChatModelListener> listeners) {
String apiVersion, Double temperature, Double topP, List<String> stop,
Integer maxTokens, Double presencePenalty, Double frequencyPenalty,
Map<String, Integer> logitBias, String responseFormat, Boolean strictJsonSchema,
Integer seed, String user, Boolean strictTools, Boolean parallelToolCalls,
Duration timeout, Integer maxRetries, Proxy proxy, Boolean logRequests,
Boolean logResponses, Tokenizer tokenizer, Map<String, String> customHeaders,
List<ChatModelListener> listeners) {
baseUrl = getOrDefault(baseUrl, OPENAI_URL);
if (OPENAI_DEMO_API_KEY.equals(apiKey)) {
@@ -105,11 +107,12 @@ public class OpenAiChatModel implements ChatLanguageModel, TokenCountEstimator {
timeout = getOrDefault(timeout, ofSeconds(60));
this.client = OpenAiClient.builder().openAiApiKey(apiKey).baseUrl(baseUrl)
.organizationId(organizationId).callTimeout(timeout).connectTimeout(timeout)
.readTimeout(timeout).writeTimeout(timeout).proxy(proxy).logRequests(logRequests)
.logResponses(logResponses).userAgent(DEFAULT_USER_AGENT)
.apiVersion(apiVersion).organizationId(organizationId).callTimeout(timeout)
.connectTimeout(timeout).readTimeout(timeout).writeTimeout(timeout).proxy(proxy)
.logRequests(logRequests).logResponses(logResponses).userAgent(DEFAULT_USER_AGENT)
.customHeaders(customHeaders).build();
this.modelName = getOrDefault(modelName, GPT_3_5_TURBO);
this.apiVersion = apiVersion;
this.temperature = getOrDefault(temperature, 0.7);
this.topP = topP;
this.stop = stop;

View File

@@ -0,0 +1,29 @@
package dev.langchain4j.opensearch.spring;
import io.milvus.common.clientenum.ConsistencyLevelEnum;
import io.milvus.param.IndexType;
import io.milvus.param.MetricType;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
class EmbeddingStoreProperties {

    // NOTE(review): IndexType / MetricType / ConsistencyLevelEnum are Milvus client types in an
    // OpenSearch package — this class appears copied from the Milvus module. They are not read by
    // OpenSearchEmbeddingStoreFactory (which only uses uri, serviceName, region, user, password,
    // token, databaseName) — confirm before removing, since they may still be bound from config.

    // Full endpoint URI of the store; populated from EmbeddingStoreConfig.getBaseUrl().
    private String uri;
    private String host;
    private Integer port;
    // AWS service name passed to OpenSearchEmbeddingStore.builder().serviceName(...).
    private String serviceName;
    // AWS region passed to OpenSearchEmbeddingStore.builder().region(...).
    private String region;
    private String collectionName;
    private Integer dimension;
    private IndexType indexType;
    private MetricType metricType;
    // API key; populated from EmbeddingStoreConfig.getApiKey().
    private String token;
    // Used both for basic auth and as the AWS access key id in the factory — see note there.
    private String user;
    // Used both for basic auth and as the AWS secret key in the factory — see note there.
    private String password;
    private ConsistencyLevelEnum consistencyLevel;
    private Boolean retrieveEmbeddingsOnSearch;
    // Prefixes every index name created by the factory ("<databaseName>_<collection>").
    private String databaseName;
    private Boolean autoFlushOnInsert;
}

View File

@@ -0,0 +1,20 @@
package dev.langchain4j.opensearch.spring;
import dev.langchain4j.store.embedding.EmbeddingStoreFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static dev.langchain4j.opensearch.spring.Properties.PREFIX;
@Configuration
@EnableConfigurationProperties(Properties.class)
public class OpenSearchAutoConfig {

    /**
     * Registers an OpenSearch-backed {@link EmbeddingStoreFactory} when an endpoint URI is
     * configured under {@code langchain4j.opensearch.embedding-store.uri}.
     *
     * <p>Renamed from {@code milvusChatModel} — a copy-paste from the Milvus module that gave
     * this bean a misleading name and risked a bean-name clash with the Milvus
     * auto-configuration. Consumers inject by type, so the rename is backward-compatible.
     */
    @Bean
    @ConditionalOnProperty(PREFIX + ".embedding-store.uri")
    EmbeddingStoreFactory openSearchEmbeddingStoreFactory(Properties properties) {
        return new OpenSearchEmbeddingStoreFactory(properties.getEmbeddingStore());
    }
}

View File

@@ -0,0 +1,58 @@
package dev.langchain4j.opensearch.spring;
import com.tencent.supersonic.common.pojo.EmbeddingStoreConfig;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.store.embedding.BaseEmbeddingStoreFactory;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.opensearch.OpenSearchEmbeddingStore;
import org.apache.hc.client5.http.auth.AuthScope;
import org.apache.hc.client5.http.auth.UsernamePasswordCredentials;
import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider;
import org.apache.hc.core5.http.HttpHost;
import org.opensearch.client.transport.aws.AwsSdk2TransportOptions;
import org.springframework.beans.BeanUtils;
import java.net.URI;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
/**
 * Builds {@link OpenSearchEmbeddingStore} instances per collection, translating the generic
 * {@link EmbeddingStoreConfig} into OpenSearch-specific connection settings.
 *
 * @author zyc
 */
public class OpenSearchEmbeddingStoreFactory extends BaseEmbeddingStoreFactory {

    // Resolved connection settings (uri, region, credentials, database name, ...).
    private final EmbeddingStoreProperties storeProperties;

    /** Convenience constructor: maps the generic store config onto OpenSearch properties. */
    public OpenSearchEmbeddingStoreFactory(EmbeddingStoreConfig storeConfig) {
        this(createPropertiesFromConfig(storeConfig));
    }

    public OpenSearchEmbeddingStoreFactory(EmbeddingStoreProperties storeProperties) {
        this.storeProperties = storeProperties;
    }

    /**
     * Copies matching bean properties from the generic config, then maps the
     * differently-named ones explicitly (baseUrl -> uri, apiKey -> token).
     */
    private static EmbeddingStoreProperties createPropertiesFromConfig(
            EmbeddingStoreConfig storeConfig) {
        EmbeddingStoreProperties embeddingStore = new EmbeddingStoreProperties();
        BeanUtils.copyProperties(storeConfig, embeddingStore);
        embeddingStore.setUri(storeConfig.getBaseUrl());
        embeddingStore.setToken(storeConfig.getApiKey());
        embeddingStore.setDatabaseName(storeConfig.getDatabaseName());
        return embeddingStore;
    }

    /**
     * Creates an embedding store bound to the index {@code <databaseName>_<collectionName>}.
     *
     * <p>NOTE(review): user/password are fed both into AWS SigV4 credentials and into
     * basic-auth userName/password on the store builder. If user or password is null
     * (e.g. a token-only setup), AwsBasicCredentials.create will throw — confirm this is
     * intended for non-AWS clusters.
     */
    @Override
    public EmbeddingStore<TextSegment> createEmbeddingStore(String collectionName) {
        final AwsSdk2TransportOptions options =
                AwsSdk2TransportOptions.builder()
                        .setCredentials(StaticCredentialsProvider.create(AwsBasicCredentials
                                .create(storeProperties.getUser(), storeProperties.getPassword())))
                        .build();
        // One index per collection, namespaced by database name to avoid cross-database clashes.
        final String indexName = storeProperties.getDatabaseName() + "_" + collectionName;
        return OpenSearchEmbeddingStore.builder().serviceName(storeProperties.getServiceName())
                .serverUrl(storeProperties.getUri()).region(storeProperties.getRegion())
                .indexName(indexName).userName(storeProperties.getUser())
                .password(storeProperties.getPassword()).apiKey(storeProperties.getToken())
                .options(options).build();
    }
}

View File

@@ -0,0 +1,17 @@
package dev.langchain4j.opensearch.spring;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.NestedConfigurationProperty;
@Getter
@Setter
@ConfigurationProperties(prefix = Properties.PREFIX)
public class Properties {

    /** Root prefix for all OpenSearch-related langchain4j settings. */
    static final String PREFIX = "langchain4j.opensearch";

    /**
     * Nested settings bound from {@code langchain4j.opensearch.embedding-store.*}.
     *
     * <p>Field made {@code private} (Lombok generates the accessors Spring binding uses) and the
     * redundant fully-qualified same-package type name replaced with the simple name.
     */
    @NestedConfigurationProperty
    private EmbeddingStoreProperties embeddingStore;
}

View File

@@ -18,13 +18,14 @@ public class OpenAiModelFactory implements ModelFactory, InitializingBean {
public static final String DEFAULT_BASE_URL = "https://api.openai.com/v1";
public static final String DEFAULT_MODEL_NAME = "gpt-4o-mini";
public static final String DEFAULT_EMBEDDING_MODEL_NAME = "text-embedding-ada-002";
public static final String DEFAULT_API_VERSION = "2024-02-01";
@Override
public ChatLanguageModel createChatModel(ChatModelConfig modelConfig) {
return OpenAiChatModel.builder().baseUrl(modelConfig.getBaseUrl())
.modelName(modelConfig.getModelName()).apiKey(modelConfig.keyDecrypt())
.temperature(modelConfig.getTemperature()).topP(modelConfig.getTopP())
.maxRetries(modelConfig.getMaxRetries())
.apiVersion(modelConfig.getApiVersion()).temperature(modelConfig.getTemperature())
.topP(modelConfig.getTopP()).maxRetries(modelConfig.getMaxRetries())
.timeout(Duration.ofSeconds(modelConfig.getTimeOut()))
.logRequests(modelConfig.getLogRequests())
.logResponses(modelConfig.getLogResponses()).build();

View File

@@ -6,6 +6,7 @@ import com.tencent.supersonic.common.util.ContextUtils;
import dev.langchain4j.chroma.spring.ChromaEmbeddingStoreFactory;
import dev.langchain4j.inmemory.spring.InMemoryEmbeddingStoreFactory;
import dev.langchain4j.milvus.spring.MilvusEmbeddingStoreFactory;
import dev.langchain4j.opensearch.spring.OpenSearchEmbeddingStoreFactory;
import dev.langchain4j.pgvector.spring.PgvectorEmbeddingStoreFactory;
import org.apache.commons.lang3.StringUtils;
@@ -45,6 +46,11 @@ public class EmbeddingStoreFactoryProvider {
return factoryMap.computeIfAbsent(embeddingStoreConfig,
storeConfig -> new InMemoryEmbeddingStoreFactory(storeConfig));
}
if (EmbeddingStoreType.OPENSEARCH.name()
.equalsIgnoreCase(embeddingStoreConfig.getProvider())) {
return factoryMap.computeIfAbsent(embeddingStoreConfig,
storeConfig -> new OpenSearchEmbeddingStoreFactory(storeConfig));
}
throw new RuntimeException("Unsupported EmbeddingStoreFactory provider: "
+ embeddingStoreConfig.getProvider());
}

View File

@@ -1,5 +1,5 @@
package dev.langchain4j.store.embedding;
public enum EmbeddingStoreType {
IN_MEMORY, MILVUS, CHROMA, PGVECTOR
IN_MEMORY, MILVUS, CHROMA, PGVECTOR, OPENSEARCH
}

View File

@@ -302,6 +302,37 @@ class SqlReplaceHelperTest {
replaceSql);
}
@Test
void testReplaceAliasWithBackticks() {
    // Verifies SqlReplaceHelper.replaceAliasWithBackticks wraps non-ASCII column aliases in
    // backticks wherever the alias is referenced (SELECT ... AS, GROUP BY, HAVING, ORDER BY),
    // while leaving plain column references and function arguments untouched.

    // Case 1: alias used in SELECT and ORDER BY.
    String sql = "SELECT 部门, SUM(访问次数) AS 总访问次数 FROM 超音数 WHERE "
            + "datediff('day', 数据日期, '2023-09-05') <= 3 GROUP BY 部门 ORDER BY 总访问次数 DESC LIMIT 10";
    String replaceSql = SqlReplaceHelper.replaceAliasWithBackticks(sql);
    System.out.println(replaceSql);
    Assert.assertEquals("SELECT 部门, SUM(访问次数) AS `总访问次数` FROM 超音数 WHERE "
            + "datediff('day', 数据日期, '2023-09-05') <= 3 GROUP BY 部门 ORDER BY `总访问次数` DESC LIMIT 10",
            replaceSql);

    // Case 2: lower-case keywords and an alias that shadows the source column name; keywords
    // are normalized to upper case and the alias is backticked in ORDER BY.
    sql = "select 部门, sum(访问次数) as 访问次数 from 超音数 where "
            + "(datediff('day', 数据日期, '2023-09-05') <= 3) and 数据日期 = '2023-10-10' "
            + "group by 部门 order by 访问次数 desc limit 10";
    replaceSql = SqlReplaceHelper.replaceAliasWithBackticks(sql);
    System.out.println(replaceSql);
    Assert.assertEquals("SELECT 部门, sum(访问次数) AS `访问次数` FROM 超音数 WHERE (datediff('day', 数据日期, "
            + "'2023-09-05') <= 3) AND 数据日期 = '2023-10-10' GROUP BY 部门 ORDER BY `访问次数` DESC LIMIT 10",
            replaceSql);

    // Case 3: multiple aliases referenced across GROUP BY, HAVING and ORDER BY, plus a bare
    // select item (访问次数) that matches an alias and is therefore also backticked.
    sql = "select 部门, sum(访问次数) as 访问次数, count(部门) as 部门数, count(部门) as 部门数2, 访问次数 from 超音数 where "
            + "(datediff('day', 数据日期, '2023-09-05') <= 3) and 数据日期 = '2023-10-10' "
            + "group by 部门, 部门数, 部门数2 having 访问次数 > 1 AND 部门数2 > 2 AND 部门数 > 1 AND 访问次数 > 1 order by 访问次数 desc limit 10";
    replaceSql = SqlReplaceHelper.replaceAliasWithBackticks(sql);
    System.out.println(replaceSql);
    Assert.assertEquals(
            "SELECT 部门, sum(访问次数) AS `访问次数`, count(部门) AS `部门数`, count(部门) AS `部门数2`, `访问次数` FROM 超音数 WHERE (datediff('day', 数据日期, "
                    + "'2023-09-05') <= 3) AND 数据日期 = '2023-10-10' GROUP BY 部门, `部门数`, `部门数2` HAVING `访问次数` > 1 AND `部门数2` > 2 AND `部门数` > 1 AND `访问次数` > 1 ORDER BY `访问次数` DESC LIMIT 10",
            replaceSql);
}
@Test
void testReplaceAliasFieldName() {
Map<String, String> map = new HashMap<>();

View File

@@ -1,25 +1,15 @@
# Use an official OpenJDK runtime as a parent image
FROM openjdk:21-jdk-bullseye
FROM supersonicbi/supersonic:0.9.10-SNAPSHOT
# Set the working directory in the container
WORKDIR /usr/src/app
# Delete old supersonic installation directory and the symbolic link
RUN rm -rf /usr/src/app/supersonic-standalone-0.9.10-SNAPSHOT
RUN rm -f /usr/src/app/supersonic-standalone-latest
# Argument to pass in the supersonic version at build time
ARG SUPERSONIC_VERSION
# Install necessary packages, including Postgres client
RUN apt-get update && apt-get install -y postgresql-client
# Install the vim editor.
RUN apt-get update && apt-get install -y vim && \
rm -rf /var/lib/apt/lists/*
# Update the package list and install iputils-ping.
RUN apt-get update && apt-get install -y iputils-ping
# Update the package list and install the dnsutils package
RUN apt-get update && apt-get install -y dnsutils
# Copy the supersonic standalone zip file into the container
COPY assembly/build/supersonic-standalone-${SUPERSONIC_VERSION}.zip .

View File

@@ -1,5 +1,5 @@
services:
postgres:
supersonic_postgres:
image: pgvector/pgvector:pg17
privileged: true
container_name: supersonic_postgres
@@ -21,35 +21,10 @@ services:
- 8.8.4.4
healthcheck:
test: ["CMD-SHELL", "sh -c 'pg_isready -U supersonic_user -d postgres'"]
interval: 30s
interval: 10s
timeout: 10s
retries: 5
db_init:
image: supersonicbi/supersonic:${SUPERSONIC_VERSION:-latest}
privileged: true
container_name: supersonic_db_init
depends_on:
postgres:
condition: service_healthy
networks:
- supersonic_network
command: >
sh -c "
if ! PGPASSWORD=supersonic_password psql -h supersonic_postgres -U supersonic_user -d postgres -c 'select * from s2_database limit 1' > /dev/null;
then
PGPASSWORD=supersonic_password psql -hsupersonic_postgres -U supersonic_user -d postgres < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/schema-postgres.sql
PGPASSWORD=supersonic_password psql -hsupersonic_postgres -U supersonic_user -d postgres < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/schema-postgres-demo.sql
PGPASSWORD=supersonic_password psql -hsupersonic_postgres -U supersonic_user -d postgres < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/data-postgres.sql
PGPASSWORD=supersonic_password psql -hsupersonic_postgres -U supersonic_user -d postgres < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/data-postgres-demo.sql
else
echo 'Database already initialized.'
fi"
dns:
- 114.114.114.114
- 8.8.8.8
- 8.8.4.4
supersonic_standalone:
image: supersonicbi/supersonic:${SUPERSONIC_VERSION:-latest}
privileged: true
@@ -62,10 +37,8 @@ services:
ports:
- "9080:9080"
depends_on:
postgres:
supersonic_postgres:
condition: service_healthy
db_init:
condition: service_completed_successfully
networks:
- supersonic_network
dns:

View File

@@ -15,7 +15,7 @@ public class ColumnSchema {
private FieldType filedType;
private AggOperatorEnum agg = AggOperatorEnum.ANY;
private AggOperatorEnum agg = AggOperatorEnum.NONE;
private String name;

View File

@@ -16,6 +16,7 @@ import java.util.stream.Collectors;
public class DataSetSchema implements Serializable {
private String databaseType;
private String databaseVersion;
private SchemaElement dataSet;
private Set<SchemaElement> metrics = new HashSet<>();
private Set<SchemaElement> dimensions = new HashSet<>();

View File

@@ -11,6 +11,8 @@ import java.util.List;
@NoArgsConstructor
public class DbSchema {
private String catalog;
private String db;
private String table;

View File

@@ -1,10 +1,6 @@
package com.tencent.supersonic.headless.api.pojo;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.*;
import java.io.Serializable;
@@ -21,6 +17,7 @@ public class SchemaElementMatch implements Serializable {
private String word;
private Long frequency;
private boolean isInherited;
private boolean llmMatched;
public boolean isFullMatched() {
return 1.0 == similarity;

View File

@@ -109,7 +109,8 @@ public class SemanticParseInfo implements Serializable {
}
}
private static class SchemaNameLengthComparator implements Comparator<SchemaElement> {
private static class SchemaNameLengthComparator
implements Comparator<SchemaElement>, Serializable {
@Override
public int compare(SchemaElement o1, SchemaElement o2) {
if (o1.getOrder() != o2.getOrder()) {

View File

@@ -2,8 +2,11 @@ package com.tencent.supersonic.headless.api.pojo;
import lombok.Data;
import java.io.Serializable;
@Data
public class SqlEvaluation {
public class SqlEvaluation implements Serializable {
private static final long serialVersionUID = 1L;
private Boolean isValidated;
private String validateMsg;

View File

@@ -2,8 +2,11 @@ package com.tencent.supersonic.headless.api.pojo;
import lombok.Data;
import java.io.Serializable;
@Data
public class SqlInfo {
public class SqlInfo implements Serializable {
private static final long serialVersionUID = 1L;
// S2SQL generated by semantic parsers
private String parsedS2SQL;

View File

@@ -1,5 +1,12 @@
package com.tencent.supersonic.headless.api.pojo.enums;
public enum ChatWorkflowState {
MAPPING, PARSING, CORRECTING, TRANSLATING, PROCESSING, FINISHED
MAPPING,
PARSING,
S2SQL_CORRECTING,
TRANSLATING,
VALIDATING,
SQL_CORRECTING,
PROCESSING,
FINISHED
}

View File

@@ -8,7 +8,9 @@ import java.util.Set;
public enum DataType {
MYSQL("mysql", "mysql", "com.mysql.cj.jdbc.Driver", "`", "`", "'", "'"),
HIVE2("hive2", "hive", "org.apache.hive.jdbc.HiveDriver", "`", "`", "`", "`"),
HIVE2("hive2", "hive", "org.apache.kyuubi.jdbc.KyuubiHiveDriver", "`", "`", "`", "`"),
KYUUBI("kyuubi", "kyuubi", "org.apache.kyuubi.jdbc.KyuubiHiveDriver", "`", "`", "`", "`"),
ORACLE("oracle", "oracle", "oracle.jdbc.driver.OracleDriver", "\"", "\"", "\"", "\""),
@@ -27,6 +29,8 @@ public enum DataType {
PRESTO("presto", "presto", "com.facebook.presto.jdbc.PrestoDriver", "\"", "\"", "\"", "\""),
TRINO("trino", "trino", "io.trino.jdbc.TrinoDriver", "\"", "\"", "\"", "\""),
MOONBOX("moonbox", "moonbox", "moonbox.jdbc.MbDriver", "`", "`", "`", "`"),
CASSANDRA("cassandra", "cassandra", "com.github.adejanovski.cassandra.jdbc.CassandraDriver", "",
@@ -46,6 +50,7 @@ public enum DataType {
TDENGINE("TAOS", "TAOS", "com.taosdata.jdbc.TSDBDriver", "'", "'", "\"", "\""),
POSTGRESQL("postgresql", "postgresql", "org.postgresql.Driver", "'", "'", "\"", "\""),
DUCKDB("duckdb", "duckdb", "org.duckdb.DuckDBDriver", "'", "'", "\"", "\"");
private String feature;

View File

@@ -19,6 +19,8 @@ public class ModelBuildReq {
private String sql;
private String catalog;
private String db;
private List<String> tables;

View File

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.api.pojo.request;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.jsqlparser.SqlAddHelper;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.DateConf;
@@ -281,7 +282,7 @@ public class QueryStructReq extends SemanticQueryReq {
public String getTableName() {
if (StringUtils.isNotBlank(dataSetName)) {
return dataSetName;
return SqlReplaceHelper.escapeTableName(dataSetName);
}
if (dataSetId != null) {
return Constants.TABLE_PREFIX + dataSetId;

View File

@@ -1,7 +1,6 @@
package com.tencent.supersonic.headless.api.pojo.response;
import com.google.common.collect.Lists;
import com.tencent.supersonic.headless.api.pojo.Identify;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@@ -14,6 +13,7 @@ import java.util.List;
public class DataSetSchemaResp extends DataSetResp {
private String databaseType;
private String databaseVersion;
private List<MetricSchemaResp> metrics = Lists.newArrayList();
private List<DimSchemaResp> dimensions = Lists.newArrayList();
private List<ModelResp> modelResps = Lists.newArrayList();

View File

@@ -1,16 +1,16 @@
package com.tencent.supersonic.headless.api.pojo.response;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.Text2SQLExemplar;
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import lombok.Data;
import java.io.Serializable;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
@Data
public class ParseResp {
public class ParseResp implements Serializable {
private final String queryText;
private ParseState state = ParseState.PENDING;
private String errorMsg;

View File

@@ -2,8 +2,10 @@ package com.tencent.supersonic.headless.api.pojo.response;
import lombok.Data;
import java.io.Serializable;
@Data
public class ParseTimeCostResp {
public class ParseTimeCostResp implements Serializable {
private long parseStartTime;
private long parseTime;

View File

@@ -8,6 +8,7 @@ import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import com.tencent.supersonic.headless.api.pojo.SemanticSchema;
import com.tencent.supersonic.headless.api.pojo.enums.ChatWorkflowState;
import com.tencent.supersonic.headless.api.pojo.request.QueryNLReq;
import com.tencent.supersonic.headless.api.pojo.response.ParseResp;
import com.tencent.supersonic.headless.chat.query.SemanticQuery;
import lombok.Data;
@@ -23,6 +24,7 @@ import java.util.stream.Collectors;
public class ChatQueryContext implements Serializable {
private QueryNLReq request;
private ParseResp parseResp;
private Map<Long, List<Long>> modelIdToDataSetIds;
private List<SemanticQuery> candidateQueries = new ArrayList<>();
private SchemaMapInfo mapInfo = new SchemaMapInfo();

View File

@@ -13,6 +13,7 @@ public class EmbeddingResult extends MapResult {
private String id;
private Map<String, String> metadata;
private boolean llmMatched;
@Override
public boolean equals(Object o) {

View File

@@ -1,9 +1,12 @@
package com.tencent.supersonic.headless.chat.mapper;
import com.tencent.supersonic.common.pojo.enums.Text2SQLType;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.api.pojo.SchemaElement;
import com.tencent.supersonic.headless.api.pojo.SchemaElementMatch;
import com.tencent.supersonic.headless.api.pojo.SchemaElementType;
import com.tencent.supersonic.headless.api.pojo.SchemaMapInfo;
import com.tencent.supersonic.headless.api.pojo.enums.MapModeEnum;
import com.tencent.supersonic.headless.chat.ChatQueryContext;
import com.tencent.supersonic.headless.chat.knowledge.EmbeddingResult;
@@ -11,6 +14,7 @@ import com.tencent.supersonic.headless.chat.knowledge.builder.BaseWordBuilder;
import com.tencent.supersonic.headless.chat.knowledge.helper.HanlpHelper;
import dev.langchain4j.store.embedding.Retrieval;
import lombok.extern.slf4j.Slf4j;
import org.springframework.util.CollectionUtils;
import java.util.List;
import java.util.Objects;
@@ -23,10 +27,16 @@ public class EmbeddingMapper extends BaseMapper {
@Override
public boolean accept(ChatQueryContext chatQueryContext) {
return MapModeEnum.LOOSE.equals(chatQueryContext.getRequest().getMapModeEnum());
boolean b0 = MapModeEnum.LOOSE.equals(chatQueryContext.getRequest().getMapModeEnum());
boolean b1 = chatQueryContext.getRequest().getText2SQLType() == Text2SQLType.LLM_OR_RULE;
return b0 || b1;
}
public void doMap(ChatQueryContext chatQueryContext) {
// TODO: if mapping already ran in LOOSE mode, it need not run again in the LLM_OR_RULE
// phase; there is currently no state to convey this, so the duplicated work is tolerated for now.
SchemaMapInfo mappedInfo = chatQueryContext.getMapInfo();
// 1. Query from embedding by queryText
EmbeddingMatchStrategy matchStrategy = ContextUtils.getBean(EmbeddingMatchStrategy.class);
List<EmbeddingResult> matchResults = getMatches(chatQueryContext, matchStrategy);
@@ -53,15 +63,26 @@ public class EmbeddingMapper extends BaseMapper {
continue;
}
// Build SchemaElementMatch object
SchemaElementMatch schemaElementMatch = SchemaElementMatch.builder()
.element(schemaElement).frequency(BaseWordBuilder.DEFAULT_FREQUENCY)
.word(matchResult.getName()).similarity(matchResult.getSimilarity())
.detectWord(matchResult.getDetectWord()).build();
schemaElementMatch.setLlmMatched(matchResult.isLlmMatched());
// 3. Add SchemaElementMatch to mapInfo
addToSchemaMap(chatQueryContext.getMapInfo(), dataSetId, schemaElementMatch);
}
if (CollectionUtils.isEmpty(matchResults)) {
log.info("embedding mapper no match");
} else {
for (EmbeddingResult matchResult : matchResults) {
log.info("embedding match name=[{}],detectWord=[{}],similarity=[{}],metadata=[{}]",
matchResult.getName(), matchResult.getDetectWord(),
matchResult.getSimilarity(), JsonUtil.toString(matchResult.getMetadata()));
}
}
}
}

View File

@@ -1,9 +1,17 @@
package com.tencent.supersonic.headless.chat.mapper;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;
import com.hankcs.hanlp.seg.common.Term;
import com.tencent.supersonic.headless.api.pojo.response.S2Term;
import com.tencent.supersonic.headless.chat.ChatQueryContext;
import com.tencent.supersonic.headless.chat.knowledge.EmbeddingResult;
import com.tencent.supersonic.headless.chat.knowledge.MetaEmbeddingService;
import com.tencent.supersonic.headless.chat.knowledge.helper.HanlpHelper;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.provider.ModelProvider;
import dev.langchain4j.store.embedding.Retrieval;
import dev.langchain4j.store.embedding.RetrieveQuery;
import dev.langchain4j.store.embedding.RetrieveQueryResult;
@@ -14,18 +22,12 @@ import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import static com.tencent.supersonic.headless.chat.mapper.MapperConfig.EMBEDDING_MAPPER_NUMBER;
import static com.tencent.supersonic.headless.chat.mapper.MapperConfig.EMBEDDING_MAPPER_ROUND_NUMBER;
import static com.tencent.supersonic.headless.chat.mapper.MapperConfig.EMBEDDING_MAPPER_THRESHOLD;
import static com.tencent.supersonic.headless.chat.mapper.MapperConfig.*;
/**
* EmbeddingMatchStrategy uses vector database to perform similarity search against the embeddings
@@ -35,37 +37,167 @@ import static com.tencent.supersonic.headless.chat.mapper.MapperConfig.EMBEDDING
@Slf4j
public class EmbeddingMatchStrategy extends BatchMatchStrategy<EmbeddingResult> {
@Autowired
protected MapperConfig mapperConfig;
@Autowired
private MetaEmbeddingService metaEmbeddingService;
private static final String LLM_FILTER_PROMPT =
"""
\
#Role: You are a professional data analyst specializing in metrics and dimensions.
#Task: Given a user query and a list of retrieved metrics/dimensions through vector recall,
please analyze which metrics/dimensions the user is most likely interested in.
#Rules:
1. Based on user query and retrieved info, accurately determine metrics/dimensions user truly cares about.
2. Do not return all retrieved info, only select those highly relevant to user query.
3. Maintain high quality output, exclude metrics/dimensions irrelevant to user intent.
4. Output must be in JSON array format, only include IDs from retrieved info, e.g.: ['id1', 'id2']
5. Return JSON content directly without markdown formatting
#Input Example:
#User Query: {{userText}}
#Retrieved Metrics/Dimensions: {{retrievedInfo}}
#Output:""";
@Override
public List<EmbeddingResult> detect(ChatQueryContext chatQueryContext, List<S2Term> terms,
Set<Long> detectDataSetIds) {
if (chatQueryContext == null || CollectionUtils.isEmpty(detectDataSetIds)) {
log.warn("Invalid input parameters: context={}, dataSetIds={}", chatQueryContext,
detectDataSetIds);
return Collections.emptyList();
}
// 1. Base detection
List<EmbeddingResult> baseResults = super.detect(chatQueryContext, terms, detectDataSetIds);
boolean useLLM =
Boolean.parseBoolean(mapperConfig.getParameterValue(EMBEDDING_MAPPER_USE_LLM));
// 2. LLM enhanced detection
if (useLLM) {
List<EmbeddingResult> llmResults = detectWithLLM(chatQueryContext, detectDataSetIds);
if (!CollectionUtils.isEmpty(llmResults)) {
baseResults.addAll(llmResults);
}
}
// 3. Deduplicate results
return baseResults.stream().distinct().collect(Collectors.toList());
}
/**
* Perform enhanced detection using LLM
*/
private List<EmbeddingResult> detectWithLLM(ChatQueryContext chatQueryContext,
Set<Long> detectDataSetIds) {
try {
String queryText = chatQueryContext.getRequest().getQueryText();
if (StringUtils.isBlank(queryText)) {
return Collections.emptyList();
}
// Get segmentation results
Set<String> detectSegments = extractValidSegments(queryText);
if (CollectionUtils.isEmpty(detectSegments)) {
log.info("No valid segments found for text: {}", queryText);
return Collections.emptyList();
}
return detectByBatch(chatQueryContext, detectDataSetIds, detectSegments, true);
} catch (Exception e) {
log.error("Error in LLM detection for context: {}", chatQueryContext, e);
return Collections.emptyList();
}
}
/**
* Extract valid word segments by filtering out unwanted word natures
*/
private Set<String> extractValidSegments(String text) {
List<String> natureList = Arrays.asList(StringUtils.split(
mapperConfig.getParameterValue(EMBEDDING_MAPPER_ALLOWED_SEGMENT_NATURE), ","));
return HanlpHelper.getSegment().seg(text).stream()
.filter(t -> natureList.stream().noneMatch(nature -> t.nature.startsWith(nature)))
.map(Term::getWord).collect(Collectors.toSet());
}
@Override
public List<EmbeddingResult> detectByBatch(ChatQueryContext chatQueryContext,
Set<Long> detectDataSetIds, Set<String> detectSegments) {
return detectByBatch(chatQueryContext, detectDataSetIds, detectSegments, false);
}
/**
* Process detection in batches with LLM option
*
* @param chatQueryContext The context of the chat query
* @param detectDataSetIds Target dataset IDs for detection
* @param detectSegments Segments to be detected
* @param useLlm Whether to use LLM for filtering results
* @return List of embedding results
*/
public List<EmbeddingResult> detectByBatch(ChatQueryContext chatQueryContext,
Set<Long> detectDataSetIds, Set<String> detectSegments, boolean useLlm) {
Set<EmbeddingResult> results = ConcurrentHashMap.newKeySet();
int embeddingMapperBatch = Integer
.valueOf(mapperConfig.getParameterValue(MapperConfig.EMBEDDING_MAPPER_BATCH));
List<String> queryTextsList =
detectSegments.stream().map(detectSegment -> detectSegment.trim())
.filter(detectSegment -> StringUtils.isNotBlank(detectSegment))
.collect(Collectors.toList());
// Process and filter query texts
List<String> queryTextsList = detectSegments.stream().map(String::trim)
.filter(StringUtils::isNotBlank).collect(Collectors.toList());
// Partition queries into sub-lists for batch processing
List<List<String>> queryTextsSubList =
Lists.partition(queryTextsList, embeddingMapperBatch);
// Create and execute tasks for each batch
List<Callable<Void>> tasks = new ArrayList<>();
for (List<String> queryTextsSub : queryTextsSubList) {
tasks.add(createTask(chatQueryContext, detectDataSetIds, queryTextsSub, results));
tasks.add(
createTask(chatQueryContext, detectDataSetIds, queryTextsSub, results, useLlm));
}
executeTasks(tasks);
// Apply LLM filtering if enabled
if (useLlm) {
Map<String, Object> variable = new HashMap<>();
variable.put("userText", chatQueryContext.getRequest().getQueryText());
variable.put("retrievedInfo", JSONObject.toJSONString(results));
Prompt prompt = PromptTemplate.from(LLM_FILTER_PROMPT).apply(variable);
ChatLanguageModel chatLanguageModel = ModelProvider.getChatModel();
String response = chatLanguageModel.generate(prompt.toUserMessage().singleText());
if (StringUtils.isBlank(response)) {
results.clear();
} else {
List<String> retrievedIds = JSONObject.parseArray(response, String.class);
results = results.stream().filter(t -> retrievedIds.contains(t.getId()))
.collect(Collectors.toSet());
results.forEach(r -> r.setLlmMatched(true));
}
}
return new ArrayList<>(results);
}
/**
* Create a task for batch processing
*
* @param chatQueryContext The context of the chat query
* @param detectDataSetIds Target dataset IDs
* @param queryTextsSub Sub-list of query texts to process
* @param results Shared result set for collecting results
* @param useLlm Whether to use LLM
* @return Callable task
*/
private Callable<Void> createTask(ChatQueryContext chatQueryContext, Set<Long> detectDataSetIds,
List<String> queryTextsSub, Set<EmbeddingResult> results) {
List<String> queryTextsSub, Set<EmbeddingResult> results, boolean useLlm) {
return () -> {
List<EmbeddingResult> oneRoundResults =
detectByQueryTextsSub(detectDataSetIds, queryTextsSub, chatQueryContext);
List<EmbeddingResult> oneRoundResults = detectByQueryTextsSub(detectDataSetIds,
queryTextsSub, chatQueryContext, useLlm);
synchronized (results) {
selectResultInOneRound(results, oneRoundResults);
}
@@ -73,57 +205,73 @@ public class EmbeddingMatchStrategy extends BatchMatchStrategy<EmbeddingResult>
};
}
/**
* Process a sub-list of query texts
*
* @param detectDataSetIds Target dataset IDs
* @param queryTextsSub Sub-list of query texts
* @param chatQueryContext Chat query context
* @param useLlm Whether to use LLM
* @return List of embedding results for this batch
*/
private List<EmbeddingResult> detectByQueryTextsSub(Set<Long> detectDataSetIds,
List<String> queryTextsSub, ChatQueryContext chatQueryContext) {
List<String> queryTextsSub, ChatQueryContext chatQueryContext, boolean useLlm) {
Map<Long, List<Long>> modelIdToDataSetIds = chatQueryContext.getModelIdToDataSetIds();
// Get configuration parameters
double threshold =
Double.valueOf(mapperConfig.getParameterValue(EMBEDDING_MAPPER_THRESHOLD));
// step1. build query params
RetrieveQuery retrieveQuery = RetrieveQuery.builder().queryTextsList(queryTextsSub).build();
// step2. retrieveQuery by detectSegment
Double.parseDouble(mapperConfig.getParameterValue(EMBEDDING_MAPPER_THRESHOLD));
int embeddingNumber =
Integer.valueOf(mapperConfig.getParameterValue(EMBEDDING_MAPPER_NUMBER));
Integer.parseInt(mapperConfig.getParameterValue(EMBEDDING_MAPPER_NUMBER));
int embeddingRoundNumber =
Integer.parseInt(mapperConfig.getParameterValue(EMBEDDING_MAPPER_ROUND_NUMBER));
// Build and execute query
RetrieveQuery retrieveQuery = RetrieveQuery.builder().queryTextsList(queryTextsSub).build();
List<RetrieveQueryResult> retrieveQueryResults = metaEmbeddingService.retrieveQuery(
retrieveQuery, embeddingNumber, modelIdToDataSetIds, detectDataSetIds);
if (CollectionUtils.isEmpty(retrieveQueryResults)) {
return new ArrayList<>();
return Collections.emptyList();
}
// step3. build EmbeddingResults
List<EmbeddingResult> collect = retrieveQueryResults.stream().map(retrieveQueryResult -> {
List<Retrieval> retrievals = retrieveQueryResult.getRetrieval();
if (CollectionUtils.isNotEmpty(retrievals)) {
retrievals.removeIf(retrieval -> {
if (!retrieveQueryResult.getQuery().contains(retrieval.getQuery())) {
return retrieval.getSimilarity() < threshold;
}
return false;
});
// Process results
List<EmbeddingResult> collect = retrieveQueryResults.stream().peek(result -> {
if (!useLlm && CollectionUtils.isNotEmpty(result.getRetrieval())) {
result.getRetrieval()
.removeIf(retrieval -> !result.getQuery().contains(retrieval.getQuery())
&& retrieval.getSimilarity() < threshold);
}
return retrieveQueryResult;
}).filter(retrieveQueryResult -> CollectionUtils
.isNotEmpty(retrieveQueryResult.getRetrieval()))
.flatMap(retrieveQueryResult -> retrieveQueryResult.getRetrieval().stream()
.map(retrieval -> {
EmbeddingResult embeddingResult = new EmbeddingResult();
BeanUtils.copyProperties(retrieval, embeddingResult);
embeddingResult.setDetectWord(retrieveQueryResult.getQuery());
embeddingResult.setName(retrieval.getQuery());
Map<String, String> convertedMap = retrieval.getMetadata().entrySet()
.stream().collect(Collectors.toMap(Map.Entry::getKey,
entry -> entry.getValue().toString()));
embeddingResult.setMetadata(convertedMap);
return embeddingResult;
}))
}).filter(result -> CollectionUtils.isNotEmpty(result.getRetrieval()))
.flatMap(result -> result.getRetrieval().stream()
.map(retrieval -> convertToEmbeddingResult(result, retrieval)))
.collect(Collectors.toList());
// step4. select mapResul in one round
int embeddingRoundNumber =
Integer.valueOf(mapperConfig.getParameterValue(EMBEDDING_MAPPER_ROUND_NUMBER));
int roundNumber = embeddingRoundNumber * queryTextsSub.size();
return collect.stream().sorted(Comparator.comparingDouble(EmbeddingResult::getSimilarity))
.limit(roundNumber).collect(Collectors.toList());
// Sort and limit results
return collect.stream()
.sorted(Comparator.comparingDouble(EmbeddingResult::getSimilarity).reversed())
.limit(embeddingRoundNumber * queryTextsSub.size()).collect(Collectors.toList());
}
/**
 * Builds an {@link EmbeddingResult} from one retrieved entry and the query result it belongs to.
 *
 * @param queryResult the query result that produced the retrieval; its query text becomes the
 *        detect word of the embedding result
 * @param retrieval the single retrieved entry to convert
 * @return a populated EmbeddingResult
 */
private EmbeddingResult convertToEmbeddingResult(RetrieveQueryResult queryResult,
        Retrieval retrieval) {
    EmbeddingResult result = new EmbeddingResult();
    // Copy the matching bean properties from the retrieval first, then overwrite the
    // fields that need query-specific values.
    BeanUtils.copyProperties(retrieval, result);
    result.setDetectWord(queryResult.getQuery());
    result.setName(retrieval.getQuery());
    // Stringify every metadata value so consumers always see a uniform Map<String, String>.
    Map<String, String> stringMetadata = new HashMap<>();
    retrieval.getMetadata()
            .forEach((key, value) -> stringMetadata.put(key, String.valueOf(value)));
    result.setMetadata(stringMetadata);
    return result;
}
}

View File

@@ -7,12 +7,7 @@ import com.tencent.supersonic.headless.chat.ChatQueryContext;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
@@ -66,7 +61,8 @@ public class MapFilter {
List<SchemaElementMatch> value = entry.getValue();
if (!CollectionUtils.isEmpty(value)) {
value.removeIf(schemaElementMatch -> StringUtils
.length(schemaElementMatch.getDetectWord()) <= 1);
.length(schemaElementMatch.getDetectWord()) <= 1
&& !schemaElementMatch.isLlmMatched());
}
}
}

View File

@@ -57,4 +57,12 @@ public class MapperConfig extends ParameterConfig {
public static final Parameter EMBEDDING_MAPPER_ROUND_NUMBER =
new Parameter("s2.mapper.embedding.round.number", "10", "向量召回最小相似度阈值",
"向量召回相似度阈值在动态调整中的最低值", "number", "Mapper相关配置");
public static final Parameter EMBEDDING_MAPPER_USE_LLM =
new Parameter("s2.mapper.embedding.use-llm-enhance", "false", "使用LLM对召回的向量进行二次判断开关",
"embedding的结果再通过一次LLM来筛选这时候忽略各个向量阀值", "bool", "Mapper相关配置");
public static final Parameter EMBEDDING_MAPPER_ALLOWED_SEGMENT_NATURE =
new Parameter("s2.mapper.embedding.allowed-segment-nature", "['v', 'd', 'a']",
"使用LLM召回二次处理时对问题分词词性的控制", "分词后允许的词性才会进行向量召回", "list", "Mapper相关配置");
}

View File

@@ -1,11 +1,8 @@
package com.tencent.supersonic.headless.chat.parser.llm;
import com.tencent.supersonic.common.pojo.Pair;
import com.tencent.supersonic.common.util.DateUtils;
import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
import com.tencent.supersonic.headless.api.pojo.SchemaElement;
import com.tencent.supersonic.headless.api.pojo.SchemaElementMatch;
import com.tencent.supersonic.headless.api.pojo.SchemaElementType;
import com.tencent.supersonic.headless.api.pojo.SemanticSchema;
import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.chat.ChatQueryContext;
import com.tencent.supersonic.headless.chat.parser.ParserConfig;
import com.tencent.supersonic.headless.chat.query.llm.s2sql.LLMReq;
@@ -17,11 +14,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.*;
import java.util.stream.Collectors;
import static com.tencent.supersonic.headless.chat.parser.ParserConfig.*;
@@ -56,7 +49,9 @@ public class LLMRequestService {
LLMReq llmReq = new LLMReq();
llmReq.setQueryText(queryText);
llmReq.setSchema(llmSchema);
llmSchema.setDatabaseType(getDatabaseType(queryCtx, dataSetId));
Pair<String, String> databaseInfo = getDatabaseType(queryCtx, dataSetId);
llmSchema.setDatabaseType(databaseInfo.first);
llmSchema.setDatabaseVersion(databaseInfo.second);
llmSchema.setDataSetId(dataSetId);
llmSchema.setDataSetName(dataSetIdToName.get(dataSetId));
llmSchema.setPartitionTime(getPartitionTime(queryCtx, dataSetId));
@@ -171,13 +166,14 @@ public class LLMRequestService {
return dataSetSchema.getPrimaryKey();
}
protected String getDatabaseType(@NotNull ChatQueryContext queryCtx, Long dataSetId) {
protected Pair<String, String> getDatabaseType(@NotNull ChatQueryContext queryCtx,
Long dataSetId) {
SemanticSchema semanticSchema = queryCtx.getSemanticSchema();
if (semanticSchema == null || semanticSchema.getDataSetSchemaMap() == null) {
return null;
}
Map<Long, DataSetSchema> dataSetSchemaMap = semanticSchema.getDataSetSchemaMap();
DataSetSchema dataSetSchema = dataSetSchemaMap.get(dataSetId);
return dataSetSchema.getDatabaseType();
return new Pair(dataSetSchema.getDatabaseType(), dataSetSchema.getDatabaseVersion());
}
}

View File

@@ -3,7 +3,9 @@ package com.tencent.supersonic.headless.chat.parser.llm;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.Text2SQLExemplar;
import com.tencent.supersonic.common.pojo.enums.DataFormatTypeEnum;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.service.ExemplarService;
import com.tencent.supersonic.common.util.StringUtil;
import com.tencent.supersonic.headless.chat.parser.ParserConfig;
import com.tencent.supersonic.headless.chat.query.llm.s2sql.LLMReq;
import lombok.extern.slf4j.Slf4j;
@@ -15,10 +17,9 @@ import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import static com.tencent.supersonic.headless.chat.parser.ParserConfig.PARSER_EXEMPLAR_RECALL_NUMBER;
import static com.tencent.supersonic.headless.chat.parser.ParserConfig.PARSER_FEW_SHOT_NUMBER;
import static com.tencent.supersonic.headless.chat.parser.ParserConfig.PARSER_SELF_CONSISTENCY_NUMBER;
import static com.tencent.supersonic.headless.chat.parser.ParserConfig.*;
@Component
@Slf4j
@@ -67,6 +68,11 @@ public class PromptHelper {
sideInfos.add(String.format("PriorKnowledge=[%s]", llmReq.getPriorExts()));
}
LLMReq.LLMSchema schema = llmReq.getSchema();
if (!isSupportWith(schema.getDatabaseType(), schema.getDatabaseVersion())) {
sideInfos.add("[Database does not support with statement]");
}
String termStr = buildTermStr(llmReq);
if (StringUtils.isNotEmpty(termStr)) {
sideInfos.add(String.format("DomainTerms=[%s]", termStr));
@@ -100,8 +106,6 @@ public class PromptHelper {
}
if (StringUtils.isNotEmpty(metric.getDefaultAgg())) {
metricStr.append(" AGGREGATE '" + metric.getDefaultAgg().toUpperCase() + "'");
} else {
metricStr.append(" AGGREGATE 'NONE'");
}
metricStr.append(">");
metrics.add(metricStr.toString());
@@ -152,12 +156,17 @@ public class PromptHelper {
if (llmReq.getSchema().getDatabaseType() != null) {
databaseTypeStr = llmReq.getSchema().getDatabaseType();
}
String databaseVersionStr = "";
if (llmReq.getSchema().getDatabaseVersion() != null) {
databaseVersionStr = llmReq.getSchema().getDatabaseVersion();
}
String template =
"DatabaseType=[%s], Table=[%s], PartitionTimeField=[%s], PrimaryKeyField=[%s], "
"DatabaseType=[%s], DatabaseVersion=[%s], Table=[%s], PartitionTimeField=[%s], PrimaryKeyField=[%s], "
+ "Metrics=[%s], Dimensions=[%s], Values=[%s]";
return String.format(template, databaseTypeStr, tableStr, partitionTimeStr, primaryKeyStr,
String.join(",", metrics), String.join(",", dimensions), String.join(",", values));
return String.format(template, databaseTypeStr, databaseVersionStr, tableStr,
partitionTimeStr, primaryKeyStr, String.join(",", metrics),
String.join(",", dimensions), String.join(",", values));
}
private String buildTermStr(LLMReq llmReq) {
@@ -176,4 +185,16 @@ public class PromptHelper {
return ret;
}
/**
 * Whether the given database engine supports the SQL WITH (CTE) clause.
 * MySQL gained CTE support in 8.0 and ClickHouse in 20.4; older known versions return false.
 *
 * @param type database engine name (e.g. "mysql"); may be null when the schema carries none
 * @param version database version string; may be null when not reported
 * @return true when WITH can be used, or when support cannot be ruled out
 */
public static boolean isSupportWith(String type, String version) {
    // StringUtils.equalsIgnoreCase is null-safe: a null engine type no longer throws NPE
    // (callers pass schema.getDatabaseType(), which other code explicitly null-checks)
    // and is treated as "assume supported".
    if (StringUtils.equalsIgnoreCase(type, EngineType.MYSQL.getName())
            && Objects.nonNull(version) && StringUtil.compareVersion(version, "8.0") < 0) {
        return false;
    }
    if (StringUtils.equalsIgnoreCase(type, EngineType.CLICKHOUSE.getName())
            && Objects.nonNull(version) && StringUtil.compareVersion(version, "20.4") < 0) {
        return false;
    }
    return true;
}
}

View File

@@ -38,6 +38,7 @@ public class LLMReq {
@Data
public static class LLMSchema {
private String databaseType;
private String databaseVersion;
private Long dataSetId;
private String dataSetName;
private List<SchemaElement> metrics;

View File

@@ -241,7 +241,7 @@ public abstract class RuleSemanticQuery extends BaseSemanticQuery {
.forEach(filter -> filter.setName(bizNameToName.get(filter.getBizName())));
}
List<Filter> metricFilters = queryStructReq.getMetricFilters();
if (CollectionUtils.isNotEmpty(dimensionFilters)) {
if (CollectionUtils.isNotEmpty(metricFilters)) {
metricFilters.forEach(filter -> filter.setName(bizNameToName.get(filter.getBizName())));
}
}

View File

@@ -15,12 +15,19 @@ import com.tencent.supersonic.headless.api.pojo.request.QueryFilter;
import com.tencent.supersonic.headless.api.pojo.request.QueryMultiStructReq;
import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.response.DataSetResp;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.util.CollectionUtils;
import java.util.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
@Slf4j
@@ -97,15 +104,16 @@ public class QueryReqBuilder {
* convert to QueryS2SQLReq
*
* @param querySql
* @param dataSetId
* @param dataSet
* @return
*/
public static QuerySqlReq buildS2SQLReq(String querySql, Long dataSetId) {
public static QuerySqlReq buildS2SQLReq(String querySql, DataSetResp dataSet) {
QuerySqlReq querySQLReq = new QuerySqlReq();
if (Objects.nonNull(querySql)) {
querySQLReq.setSql(querySql);
}
querySQLReq.setDataSetId(dataSetId);
querySQLReq.setDataSetId(dataSet.getId());
querySQLReq.setDataSetName(dataSet.getName());
return querySQLReq;
}

View File

@@ -121,6 +121,22 @@
<artifactId>DmJdbcDriver18</artifactId>
<version>8.1.2.192</version>
</dependency>
<dependency>
<groupId>org.apache.kyuubi</groupId>
<artifactId>kyuubi-hive-jdbc</artifactId>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-jdbc</artifactId>
</dependency>
<dependency>
<groupId>io.trino</groupId>
<artifactId>trino-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.jgrapht</groupId>
<artifactId>jgrapht-core</artifactId>
</dependency>
</dependencies>

View File

@@ -5,52 +5,81 @@ import com.tencent.supersonic.headless.api.pojo.DBColumn;
import com.tencent.supersonic.headless.api.pojo.enums.FieldType;
import com.tencent.supersonic.headless.core.pojo.ConnectInfo;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
@Slf4j
public abstract class BaseDbAdaptor implements DbAdaptor {
public List<String> getDBs(ConnectInfo connectionInfo) throws SQLException {
@Override
public List<String> getCatalogs(ConnectInfo connectInfo) throws SQLException {
List<String> catalogs = Lists.newArrayList();
try (Connection con = getConnection(connectInfo);
Statement st = con.createStatement();
ResultSet rs = st.executeQuery("SHOW CATALOGS")) {
while (rs.next()) {
catalogs.add(rs.getString(1));
}
}
return catalogs;
}
public List<String> getDBs(ConnectInfo connectionInfo, String catalog) throws SQLException {
// Except for special types implemented separately, the generic logic catalog does not take
// effect.
return getDBs(connectionInfo);
}
protected List<String> getDBs(ConnectInfo connectionInfo) throws SQLException {
List<String> dbs = Lists.newArrayList();
DatabaseMetaData metaData = getDatabaseMetaData(connectionInfo);
try {
ResultSet schemaSet = metaData.getSchemas();
while (schemaSet.next()) {
String db = schemaSet.getString("TABLE_SCHEM");
dbs.add(db);
try (ResultSet schemaSet = getDatabaseMetaData(connectionInfo).getSchemas()) {
while (schemaSet.next()) {
String db = schemaSet.getString("TABLE_SCHEM");
dbs.add(db);
}
}
} catch (Exception e) {
log.info("get meta schemas failed, try to get catalogs");
log.warn("get meta schemas failed", e);
log.warn("get meta schemas failed, try to get catalogs");
}
try {
ResultSet catalogSet = metaData.getCatalogs();
while (catalogSet.next()) {
String db = catalogSet.getString("TABLE_CAT");
dbs.add(db);
try (ResultSet catalogSet = getDatabaseMetaData(connectionInfo).getCatalogs()) {
while (catalogSet.next()) {
String db = catalogSet.getString("TABLE_CAT");
dbs.add(db);
}
}
} catch (Exception e) {
log.info("get meta catalogs failed, try to get schemas");
log.warn("get meta catalogs failed", e);
log.warn("get meta catalogs failed, try to get schemas");
}
return dbs;
}
public List<String> getTables(ConnectInfo connectionInfo, String schemaName)
@Override
public List<String> getTables(ConnectInfo connectInfo, String catalog, String schemaName)
throws SQLException {
// Except for special types implemented separately, the generic logic catalog does not take
// effect.
return getTables(connectInfo, schemaName);
}
protected List<String> getTables(ConnectInfo connectionInfo, String schemaName)
throws SQLException {
List<String> tablesAndViews = new ArrayList<>();
DatabaseMetaData metaData = getDatabaseMetaData(connectionInfo);
try {
ResultSet resultSet = getResultSet(schemaName, metaData);
while (resultSet.next()) {
String name = resultSet.getString("TABLE_NAME");
tablesAndViews.add(name);
try (ResultSet resultSet =
getResultSet(schemaName, getDatabaseMetaData(connectionInfo))) {
while (resultSet.next()) {
String name = resultSet.getString("TABLE_NAME");
tablesAndViews.add(name);
}
}
} catch (SQLException e) {
log.error("Failed to get tables and views", e);
@@ -63,27 +92,35 @@ public abstract class BaseDbAdaptor implements DbAdaptor {
return metaData.getTables(schemaName, schemaName, null, new String[] {"TABLE", "VIEW"});
}
public List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName)
throws SQLException {
List<DBColumn> dbColumns = Lists.newArrayList();
DatabaseMetaData metaData = getDatabaseMetaData(connectInfo);
ResultSet columns = metaData.getColumns(schemaName, schemaName, tableName, null);
while (columns.next()) {
String columnName = columns.getString("COLUMN_NAME");
String dataType = columns.getString("TYPE_NAME");
String remarks = columns.getString("REMARKS");
FieldType fieldType = classifyColumnType(dataType);
dbColumns.add(new DBColumn(columnName, dataType, remarks, fieldType));
public List<DBColumn> getColumns(ConnectInfo connectInfo, String catalog, String schemaName,
String tableName) throws SQLException {
List<DBColumn> dbColumns = new ArrayList<>();
// 确保连接会自动关闭
try (ResultSet columns =
getDatabaseMetaData(connectInfo).getColumns(catalog, schemaName, tableName, null)) {
while (columns.next()) {
String columnName = columns.getString("COLUMN_NAME");
String dataType = columns.getString("TYPE_NAME");
String remarks = columns.getString("REMARKS");
FieldType fieldType = classifyColumnType(dataType);
dbColumns.add(new DBColumn(columnName, dataType, remarks, fieldType));
}
}
return dbColumns;
}
protected DatabaseMetaData getDatabaseMetaData(ConnectInfo connectionInfo) throws SQLException {
Connection connection = DriverManager.getConnection(connectionInfo.getUrl(),
connectionInfo.getUserName(), connectionInfo.getPassword());
Connection connection = getConnection(connectionInfo);
return connection.getMetaData();
}
public Connection getConnection(ConnectInfo connectionInfo) throws SQLException {
final Properties properties = getProperties(connectionInfo);
return DriverManager.getConnection(connectionInfo.getUrl(), properties);
}
public FieldType classifyColumnType(String typeName) {
switch (typeName.toUpperCase()) {
case "INT":
@@ -105,4 +142,24 @@ public abstract class BaseDbAdaptor implements DbAdaptor {
}
}
public Properties getProperties(ConnectInfo connectionInfo) {
final Properties properties = new Properties();
String url = connectionInfo.getUrl().toLowerCase();
// 设置通用属性
properties.setProperty("user", connectionInfo.getUserName());
// 针对 Presto 和 Trino ssl=false 的情况,不需要设置密码
if (url.startsWith("jdbc:presto") || url.startsWith("jdbc:trino")) {
// 检查是否需要处理 SSL
if (!url.contains("ssl=false")) {
properties.setProperty("password", connectionInfo.getPassword());
}
} else {
// 针对其他数据库类型
properties.setProperty("password", connectionInfo.getPassword());
}
return properties;
}
}

View File

@@ -14,12 +14,15 @@ public interface DbAdaptor {
String rewriteSql(String sql);
List<String> getDBs(ConnectInfo connectInfo) throws SQLException;
List<String> getCatalogs(ConnectInfo connectInfo) throws SQLException;
List<String> getTables(ConnectInfo connectInfo, String schemaName) throws SQLException;
List<String> getDBs(ConnectInfo connectInfo, String catalog) throws SQLException;
List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName)
List<String> getTables(ConnectInfo connectInfo, String catalog, String schemaName)
throws SQLException;
List<DBColumn> getColumns(ConnectInfo connectInfo, String catalog, String schemaName,
String tableName) throws SQLException;
FieldType classifyColumnType(String typeName);
}

View File

@@ -18,6 +18,10 @@ public class DbAdaptorFactory {
dbAdaptorMap.put(EngineType.OTHER.getName(), new DefaultDbAdaptor());
dbAdaptorMap.put(EngineType.DUCKDB.getName(), new DuckdbAdaptor());
dbAdaptorMap.put(EngineType.HANADB.getName(), new HanadbAdaptor());
dbAdaptorMap.put(EngineType.STARROCKS.getName(), new StarrocksAdaptor());
dbAdaptorMap.put(EngineType.KYUUBI.getName(), new KyuubiAdaptor());
dbAdaptorMap.put(EngineType.PRESTO.getName(), new PrestoAdaptor());
dbAdaptorMap.put(EngineType.TRINO.getName(), new TrinoAdaptor());
}
public static DbAdaptor getEngineAdaptor(String engineType) {

View File

@@ -19,8 +19,8 @@ public class DuckdbAdaptor extends DefaultDbAdaptor {
return metaData.getTables(schemaName, null, null, new String[] {"TABLE", "VIEW"});
}
public List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName)
throws SQLException {
public List<DBColumn> getColumns(ConnectInfo connectInfo, String catalog, String schemaName,
String tableName) throws SQLException {
List<DBColumn> dbColumns = Lists.newArrayList();
DatabaseMetaData metaData = getDatabaseMetaData(connectInfo);
ResultSet columns = metaData.getColumns(schemaName, null, tableName, null);
@@ -36,6 +36,9 @@ public class DuckdbAdaptor extends DefaultDbAdaptor {
@Override
public String rewriteSql(String sql) {
if (sql == null) {
return null;
}
return sql.replaceAll("`", "");
}

View File

@@ -46,8 +46,8 @@ public class H2Adaptor extends BaseDbAdaptor {
return metaData.getTables(schemaName, null, null, new String[] {"TABLE", "VIEW"});
}
public List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName)
throws SQLException {
public List<DBColumn> getColumns(ConnectInfo connectInfo, String catalog, String schemaName,
String tableName) throws SQLException {
List<DBColumn> dbColumns = Lists.newArrayList();
DatabaseMetaData metaData = getDatabaseMetaData(connectInfo);
ResultSet columns = metaData.getColumns(schemaName, null, tableName, null);

View File

@@ -0,0 +1,86 @@
package com.tencent.supersonic.headless.core.adaptor.db;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.DBColumn;
import com.tencent.supersonic.headless.api.pojo.enums.FieldType;
import com.tencent.supersonic.headless.core.pojo.ConnectInfo;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class KyuubiAdaptor extends BaseDbAdaptor {

    /**
     * Rewrites a date column expression for Kyuubi/Spark SQL:
     * YYYYMMDD -&gt; YYYY-MM-DD, YYYY-MM, or the Monday of the containing week.
     *
     * @param dateType time dimension granularity (DAY/WEEK/MONTH)
     * @param dateFormat the stored format of the column (integer yyyyMMdd or dashed)
     * @param column column expression to wrap
     * @return the rewritten SQL expression, or the column unchanged when no rewrite applies
     */
    @Override
    public String getDateFormat(String dateType, String dateFormat, String column) {
        boolean intDay = dateFormat.equalsIgnoreCase(Constants.DAY_FORMAT_INT);
        boolean dashedDay = dateFormat.equalsIgnoreCase(Constants.DAY_FORMAT);
        if (!intDay && !dashedDay) {
            return column;
        }
        // MONTH and WEEK rewrites are identical for both stored formats.
        if (TimeDimensionEnum.MONTH.name().equalsIgnoreCase(dateType)) {
            return String.format("date_format(%s, 'yyyy-MM')", column);
        }
        if (TimeDimensionEnum.WEEK.name().equalsIgnoreCase(dateType)) {
            // Roll back to the Monday of the week containing the given day.
            return String.format("date_format(date_sub(%s, (dayofweek(%s) - 2)), 'yyyy-MM-dd')",
                    column, column);
        }
        if (intDay) {
            // Parse the integer day (yyyyMMdd) into a dashed date string.
            return String.format(
                    "date_format(to_date(cast(%s as string), 'yyyyMMdd'), 'yyyy-MM-dd')", column);
        }
        // Dashed day values are already in the target format.
        return column;
    }

    /**
     * Lists databases via {@code SHOW DATABASES}, optionally scoped to a catalog.
     *
     * @param connectionInfo connection settings
     * @param catalog optional catalog to scope the listing; blank means the current catalog
     * @return database names
     * @throws SQLException on connection or query failure
     */
    @Override
    public List<String> getDBs(ConnectInfo connectionInfo, String catalog) throws SQLException {
        // Catalogs are SQL identifiers and cannot be bound as parameters, hence concatenation.
        String showSql = StringUtils.isNotBlank(catalog) ? "SHOW DATABASES IN " + catalog
                : "SHOW DATABASES";
        List<String> databases = new ArrayList<>();
        try (Connection con = getConnection(connectionInfo);
                Statement st = con.createStatement();
                ResultSet rs = st.executeQuery(showSql)) {
            while (rs.next()) {
                databases.add(rs.getString(1));
            }
        }
        return databases;
    }

    /**
     * Lists tables and views through JDBC metadata for the given catalog/schema.
     *
     * @param connectInfo connection settings
     * @param catalog catalog to search
     * @param schemaName schema to search
     * @return table and view names; empty on metadata failure (error is logged, not thrown)
     * @throws SQLException on connection failure
     */
    @Override
    public List<String> getTables(ConnectInfo connectInfo, String catalog, String schemaName)
            throws SQLException {
        List<String> tablesAndViews = new ArrayList<>();
        try (ResultSet rs = getDatabaseMetaData(connectInfo).getTables(catalog, schemaName, null,
                new String[] {"TABLE", "VIEW"})) {
            while (rs.next()) {
                tablesAndViews.add(rs.getString("TABLE_NAME"));
            }
        } catch (SQLException e) {
            log.error("Failed to get tables and views", e);
        }
        return tablesAndViews;
    }

    /** Kyuubi SQL needs no dialect rewriting; return the statement untouched. */
    @Override
    public String rewriteSql(String sql) {
        return sql;
    }
}

View File

@@ -99,8 +99,8 @@ public class PostgresqlAdaptor extends BaseDbAdaptor {
return tablesAndViews;
}
public List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName)
throws SQLException {
public List<DBColumn> getColumns(ConnectInfo connectInfo, String catalog, String schemaName,
String tableName) throws SQLException {
List<DBColumn> dbColumns = Lists.newArrayList();
DatabaseMetaData metaData = getDatabaseMetaData(connectInfo);
ResultSet columns = metaData.getColumns(null, null, tableName, null);

View File

@@ -0,0 +1,88 @@
package com.tencent.supersonic.headless.core.adaptor.db;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.DBColumn;
import com.tencent.supersonic.headless.api.pojo.enums.FieldType;
import com.tencent.supersonic.headless.core.pojo.ConnectInfo;
import org.apache.commons.lang3.StringUtils;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
public class PrestoAdaptor extends BaseDbAdaptor {

    /**
     * Rewrites a date column expression for Presto SQL:
     * YYYYMMDD -&gt; YYYY-MM-DD, YYYY-MM, or the Monday of the containing week.
     *
     * @param dateType time dimension granularity (DAY/WEEK/MONTH)
     * @param dateFormat the stored format of the column (integer yyyyMMdd or dashed)
     * @param column column expression to wrap
     * @return the rewritten SQL expression, or the column unchanged when no rewrite applies
     */
    @Override
    public String getDateFormat(String dateType, String dateFormat, String column) {
        boolean intDay = dateFormat.equalsIgnoreCase(Constants.DAY_FORMAT_INT);
        boolean dashedDay = dateFormat.equalsIgnoreCase(Constants.DAY_FORMAT);
        if (!intDay && !dashedDay) {
            return column;
        }
        // MONTH and WEEK rewrites are identical for both stored formats.
        if (TimeDimensionEnum.MONTH.name().equalsIgnoreCase(dateType)) {
            return String.format("date_format(%s, '%%Y-%%m')", column);
        }
        if (TimeDimensionEnum.WEEK.name().equalsIgnoreCase(dateType)) {
            // Roll back to the Monday of the week containing the given day.
            return String.format(
                    "date_format(date_add('day', - (day_of_week(%s) - 2), %s), '%%Y-%%m-%%d')",
                    column, column);
        }
        if (intDay) {
            // Parse the integer day (yyyyMMdd) into a dashed date string.
            return String.format("date_format(date_parse(%s, '%%Y%%m%%d'), '%%Y-%%m-%%d')",
                    column);
        }
        // Dashed day values are already in the target format.
        return column;
    }

    /**
     * Lists schemas via {@code SHOW SCHEMAS}, optionally scoped to a catalog.
     *
     * @param connectionInfo connection settings
     * @param catalog optional catalog to scope the listing; blank means the current catalog
     * @return schema names
     * @throws SQLException on connection or query failure
     */
    @Override
    public List<String> getDBs(ConnectInfo connectionInfo, String catalog) throws SQLException {
        // Catalogs are SQL identifiers and cannot be bound as parameters, hence concatenation.
        String showSql = StringUtils.isNotBlank(catalog) ? "SHOW SCHEMAS IN " + catalog
                : "SHOW SCHEMAS";
        List<String> schemas = new ArrayList<>();
        try (Connection con = getConnection(connectionInfo);
                Statement st = con.createStatement();
                ResultSet rs = st.executeQuery(showSql)) {
            while (rs.next()) {
                schemas.add(rs.getString(1));
            }
        }
        return schemas;
    }

    /**
     * Lists tables via {@code SHOW TABLES IN [catalog.]schema}.
     *
     * @param connectInfo connection settings
     * @param catalog optional catalog qualifier; blank means the current catalog
     * @param schemaName schema to list
     * @return table names
     * @throws SQLException on connection or query failure
     */
    @Override
    public List<String> getTables(ConnectInfo connectInfo, String catalog, String schemaName)
            throws SQLException {
        String qualifiedSchema =
                StringUtils.isNotBlank(catalog) ? catalog + "." + schemaName : schemaName;
        List<String> tablesAndViews = new ArrayList<>();
        try (Connection con = getConnection(connectInfo);
                Statement st = con.createStatement();
                ResultSet rs = st.executeQuery("SHOW TABLES IN " + qualifiedSchema)) {
            while (rs.next()) {
                tablesAndViews.add(rs.getString(1));
            }
        }
        return tablesAndViews;
    }

    /** Presto SQL needs no dialect rewriting; return the statement untouched. */
    @Override
    public String rewriteSql(String sql) {
        return sql;
    }
}

View File

@@ -0,0 +1,86 @@
package com.tencent.supersonic.headless.core.adaptor.db;
import com.google.common.collect.Lists;
import com.tencent.supersonic.headless.api.pojo.DBColumn;
import com.tencent.supersonic.headless.api.pojo.enums.FieldType;
import com.tencent.supersonic.headless.core.pojo.ConnectInfo;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.Assert;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
@Slf4j
public class StarrocksAdaptor extends MysqlAdaptor {

    /**
     * Lists databases via {@code SHOW DATABASES}, optionally scoped to a catalog
     * ({@code SHOW DATABASES IN <catalog>}), which StarRocks supports for external catalogs.
     *
     * @param connectionInfo connection settings
     * @param catalog optional catalog; blank means the current catalog
     * @return database names
     * @throws SQLException on connection or query failure
     */
    @Override
    public List<String> getDBs(ConnectInfo connectionInfo, String catalog) throws SQLException {
        List<String> dbs = Lists.newArrayList();
        // Catalogs are SQL identifiers and cannot be bound as parameters, hence string append.
        final StringBuilder sql = new StringBuilder("SHOW DATABASES");
        if (StringUtils.isNotBlank(catalog)) {
            sql.append(" IN ").append(catalog);
        }
        try (Connection con = getConnection(connectionInfo);
                Statement st = con.createStatement();
                ResultSet rs = st.executeQuery(sql.toString())) {
            while (rs.next()) {
                dbs.add(rs.getString(1));
            }
        }
        return dbs;
    }

    /**
     * Lists tables via {@code SHOW TABLES IN [catalog.]schema}.
     *
     * @param connectInfo connection settings
     * @param catalog optional catalog qualifier; blank means the current catalog
     * @param schemaName schema (database) to list
     * @return table names
     * @throws SQLException on connection or query failure
     */
    @Override
    public List<String> getTables(ConnectInfo connectInfo, String catalog, String schemaName)
            throws SQLException {
        List<String> tablesAndViews = new ArrayList<>();
        final StringBuilder sql = new StringBuilder("SHOW TABLES");
        if (StringUtils.isNotBlank(catalog)) {
            sql.append(" IN ").append(catalog).append(".").append(schemaName);
        } else {
            sql.append(" IN ").append(schemaName);
        }
        try (Connection con = getConnection(connectInfo);
                Statement st = con.createStatement();
                ResultSet rs = st.executeQuery(sql.toString())) {
            while (rs.next()) {
                tablesAndViews.add(rs.getString(1));
            }
        }
        return tablesAndViews;
    }

    /**
     * Reads column metadata for one table, after switching to the requested catalog.
     *
     * @param connectInfo connection settings
     * @param catalog optional catalog to switch to before reading metadata
     * @param schemaName schema (database) containing the table
     * @param tableName table whose columns are listed
     * @return column descriptors (name, type, remarks, classified field type)
     * @throws SQLException on connection or metadata failure
     */
    @Override
    public List<DBColumn> getColumns(ConnectInfo connectInfo, String catalog, String schemaName,
            String tableName) throws SQLException {
        List<DBColumn> dbColumns = new ArrayList<>();
        // try-with-resources guarantees the connection is closed.
        try (Connection con = getConnection(connectInfo); Statement st = con.createStatement()) {
            // Switch to the target catalog first, so the metadata call below resolves
            // against it. The ordering (SET CATALOG before getMetaData) matters here.
            if (StringUtils.isNotBlank(catalog)) {
                st.execute("SET CATALOG " + catalog);
            }
            // Obtain DatabaseMetaData only AFTER the catalog switch above.
            DatabaseMetaData metaData = con.getMetaData();
            // Fetch the columns of the specific table.
            // NOTE(review): schemaName is passed as BOTH the catalog and schemaPattern
            // arguments while the `catalog` parameter is only used via SET CATALOG —
            // presumably intentional for StarRocks' MySQL-protocol metadata; confirm.
            try (ResultSet columns = metaData.getColumns(schemaName, schemaName, tableName, null)) {
                while (columns.next()) {
                    String columnName = columns.getString("COLUMN_NAME");
                    String dataType = columns.getString("TYPE_NAME");
                    String remarks = columns.getString("REMARKS");
                    FieldType fieldType = classifyColumnType(dataType);
                    dbColumns.add(new DBColumn(columnName, dataType, remarks, fieldType));
                }
            }
        }
        return dbColumns;
    }
}

View File

@@ -0,0 +1,8 @@
package com.tencent.supersonic.headless.core.adaptor.db;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
public class TrinoAdaptor extends PrestoAdaptor {
}

View File

@@ -0,0 +1,24 @@
package com.tencent.supersonic.headless.core.translator;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.config.ParameterConfig;
import com.tencent.supersonic.common.pojo.Parameter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import java.util.List;

/**
 * System-parameter configuration for the semantic translator.
 */
@Service("HeadlessTranslatorConfig")
@Slf4j
public class TranslatorConfig extends ParameterConfig {

    // Caps the number of rows a translated query may return (frontend rendering guard).
    public static final Parameter TRANSLATOR_RESULT_LIMIT =
            new Parameter("s2.query-optimizer.resultLimit", "1000", "查询最大返回数据行数",
                    "为了前端展示性能考虑,请不要设置过大", "number", "语义翻译配置");

    /**
     * @return the mutable list of translator parameters exposed to the system config.
     */
    @Override
    public List<Parameter> getSysParameters() {
        List<Parameter> parameters = Lists.newArrayList();
        parameters.add(TRANSLATOR_RESULT_LIMIT);
        return parameters;
    }
}

View File

@@ -16,6 +16,6 @@ public class ResultLimitOptimizer implements QueryOptimizer {
@Override
public void rewrite(QueryStatement queryStatement) {
queryStatement.setSql(queryStatement.getSql() + " limit " + queryStatement.getLimit());
queryStatement.setSql(queryStatement.getSql() + " LIMIT " + queryStatement.getLimit());
}
}

View File

@@ -2,7 +2,6 @@ package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
@@ -41,9 +40,8 @@ public class DimExpressionParser implements QueryParser {
Map<String, String> bizName2Expr = getDimensionExpressions(semanticSchema, ontologyQuery);
if (!CollectionUtils.isEmpty(bizName2Expr)) {
String sql = SqlReplaceHelper.replaceSqlByExpression(
Constants.TABLE_PREFIX + queryStatement.getDataSetId(), sqlQuery.getSql(),
bizName2Expr);
String sql = SqlReplaceHelper.replaceSqlByExpression(sqlQuery.getTable(),
sqlQuery.getSql(), bizName2Expr);
sqlQuery.setSql(sql);
}
}

View File

@@ -2,7 +2,6 @@ package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
@@ -40,9 +39,8 @@ public class MetricExpressionParser implements QueryParser {
Map<String, String> bizName2Expr = getMetricExpressions(semanticSchema, ontologyQuery);
if (!CollectionUtils.isEmpty(bizName2Expr)) {
String sql = SqlReplaceHelper.replaceSqlByExpression(
Constants.TABLE_PREFIX + queryStatement.getDataSetId(), sqlQuery.getSql(),
bizName2Expr);
String sql = SqlReplaceHelper.replaceSqlByExpression(sqlQuery.getTable(),
sqlQuery.getSql(), bizName2Expr);
sqlQuery.setSql(sql);
}
}
@@ -110,7 +108,7 @@ public class MetricExpressionParser implements QueryParser {
if (allMeasures.containsKey(field)) {
Measure measure = allMeasures.get(field);
String expr = metricExpr;
if (Objects.nonNull(measure.getAgg())) {
if (StringUtils.isNotBlank(measure.getAgg())) {
expr = String.format("%s (%s)", measure.getAgg(), metricExpr);
}
replace.put(field, expr);

View File

@@ -10,6 +10,7 @@ import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.QueryState;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
@@ -68,6 +69,8 @@ public class SqlQueryParser implements QueryParser {
ontologyQuery.setAggOption(sqlQueryAggOption);
convertNameToBizName(queryStatement);
// Solve the problem of SQL execution error when alias is Chinese
aliasesWithBackticks(queryStatement);
rewriteOrderBy(queryStatement);
// fill sqlQuery
@@ -75,7 +78,7 @@ public class SqlQueryParser implements QueryParser {
if (StringUtils.isEmpty(tableName)) {
return;
}
sqlQuery.setTable(tableName.toLowerCase());
sqlQuery.setTable(Constants.TABLE_PREFIX + queryStatement.getDataSetId());
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
SemanticSchemaResp semanticSchema = queryStatement.getSemanticSchema();
if (!sqlGenerateUtils.isSupportWith(
@@ -88,6 +91,12 @@ public class SqlQueryParser implements QueryParser {
log.info("parse sqlQuery [{}] ", sqlQuery);
}
private void aliasesWithBackticks(QueryStatement queryStatement) {
String sql = queryStatement.getSqlQuery().getSql();
sql = SqlReplaceHelper.replaceAliasWithBackticks(sql);
queryStatement.getSqlQuery().setSql(sql);
}
private AggOption getAggOption(String sql, Set<MetricSchemaResp> metricSchemas) {
if (SqlSelectFunctionHelper.hasAggregateFunction(sql)) {
return AggOption.AGGREGATION;
@@ -180,12 +189,10 @@ public class SqlQueryParser implements QueryParser {
String modelName = entry.getKey();
entry.getValue().forEach(m -> {
if (fields.contains(m.getName()) || fields.contains(m.getBizName())) {
if (!ontologyQuery.getMetricMap().containsKey(modelName)) {
ontologyQuery.getMetricMap().put(modelName, Sets.newHashSet());
}
ontologyQuery.getModelMap().put(modelName,
ontology.getModelMap().get(modelName));
ontologyQuery.getMetricMap().get(modelName).add(m);
ontologyQuery.getMetricMap().computeIfAbsent(modelName, k -> Sets.newHashSet())
.add(m);
fields.remove(m.getName());
fields.remove(m.getBizName());
}
@@ -199,33 +206,51 @@ public class SqlQueryParser implements QueryParser {
String modelName = entry.getKey();
entry.getValue().forEach(d -> {
if (fields.contains(d.getName()) || fields.contains(d.getBizName())) {
if (!ontologyQuery.getDimensionMap().containsKey(entry.getKey())) {
ontologyQuery.getDimensionMap().put(entry.getKey(),
Sets.newHashSet());
}
ontologyQuery.getModelMap().put(modelName,
ontology.getModelMap().get(modelName));
ontologyQuery.getDimensionMap().get(entry.getKey()).add(d);
ontologyQuery.getDimensionMap()
.computeIfAbsent(modelName, k -> Sets.newHashSet()).add(d);
fields.remove(d.getName());
fields.remove(d.getBizName());
}
});
});
// if there are still fields not found belonging models, try to find in the models without
// querying metrics.
// second, try to find a model that has all the remaining fields, such that no further join
// is needed.
if (!fields.isEmpty()) {
Map<String, Set<DimSchemaResp>> model2dims = new HashMap<>();
ontology.getDimensionMap().entrySet().forEach(entry -> {
String modelName = entry.getKey();
entry.getValue().forEach(d -> {
if (fields.contains(d.getName()) || fields.contains(d.getBizName())) {
model2dims.computeIfAbsent(modelName, k -> Sets.newHashSet()).add(d);
}
});
});
Optional<Map.Entry<String, Set<DimSchemaResp>>> modelEntry = model2dims.entrySet()
.stream().filter(entry -> entry.getValue().size() == fields.size()).findFirst();
if (modelEntry.isPresent()) {
ontologyQuery.getDimensionMap().put(modelEntry.get().getKey(),
modelEntry.get().getValue());
ontologyQuery.getModelMap().put(modelEntry.get().getKey(),
ontology.getModelMap().get(modelEntry.get().getKey()));
fields.clear();
}
}
// finally if there are still fields not found belonging models, try to find in the models
// iteratively
if (!fields.isEmpty()) {
ontology.getDimensionMap().entrySet().forEach(entry -> {
String modelName = entry.getKey();
if (!ontologyQuery.getDimensionMap().containsKey(modelName)) {
entry.getValue().forEach(d -> {
if (fields.contains(d.getName()) || fields.contains(d.getBizName())) {
if (!ontologyQuery.getDimensionMap().containsKey(modelName)) {
ontologyQuery.getDimensionMap().put(modelName, Sets.newHashSet());
}
ontologyQuery.getModelMap().put(modelName,
ontology.getModelMap().get(modelName));
ontologyQuery.getDimensionMap().get(modelName).add(d);
ontologyQuery.getDimensionMap()
.computeIfAbsent(modelName, k -> Sets.newHashSet()).add(d);
fields.remove(d.getName());
fields.remove(d.getBizName());
}

View File

@@ -1,5 +1,6 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.google.common.collect.Sets;
import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.Dimension;
@@ -9,17 +10,26 @@ import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.core.pojo.*;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.Constants;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.*;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Triple;
import org.springframework.util.CollectionUtils;
import org.jgrapht.Graph;
import org.jgrapht.GraphPath;
import org.jgrapht.alg.shortestpath.DijkstraShortestPath;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.graph.DefaultUndirectedGraph;
import java.util.*;
import java.util.stream.Collectors;
@@ -37,6 +47,8 @@ public class SqlBuilder {
public String buildOntologySql(QueryStatement queryStatement) throws Exception {
OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
Ontology ontology = queryStatement.getOntology();
if (ontologyQuery.getLimit() == null) {
ontologyQuery.setLimit(0L);
}
@@ -46,7 +58,14 @@ public class SqlBuilder {
throw new Exception("data model not found");
}
TableView tableView = render(ontologyQuery, new ArrayList<>(dataModels), scope, schema);
TableView tableView;
if (!CollectionUtils.isEmpty(ontology.getJoinRelations()) && dataModels.size() > 1) {
Set<ModelResp> models = probeRelatedModels(dataModels, queryStatement.getOntology());
tableView = render(ontologyQuery, models, scope, schema);
} else {
tableView = render(ontologyQuery, dataModels, scope, schema);
}
SqlNode parserNode = tableView.build();
DatabaseResp database = queryStatement.getOntology().getDatabase();
EngineType engineType = EngineType.fromString(database.getType());
@@ -54,7 +73,61 @@ public class SqlBuilder {
return SemanticNode.getSql(parserNode, engineType);
}
private SqlNode optimizeParseNode(SqlNode parserNode, EngineType engineType) {
private Set<ModelResp> probeRelatedModels(Set<ModelResp> dataModels, Ontology ontology) {
List<JoinRelation> joinRelations = ontology.getJoinRelations();
Graph<String, DefaultEdge> graph = buildGraph(joinRelations);
DijkstraShortestPath<String, DefaultEdge> dijkstraAlg = new DijkstraShortestPath<>(graph);
Set<String> queryModels =
dataModels.stream().map(ModelResp::getName).collect(Collectors.toSet());
GraphPath<String, DefaultEdge> selectedGraphPath = null;
for (String fromModel : queryModels) {
for (String toModel : queryModels) {
if (fromModel != toModel) {
GraphPath<String, DefaultEdge> path = dijkstraAlg.getPath(fromModel, toModel);
if (isGraphPathContainsAll(path, queryModels)) {
selectedGraphPath = path;
break;
}
}
}
}
if (selectedGraphPath == null) {
return dataModels;
}
Set<String> modelNames = Sets.newHashSet();
for (DefaultEdge edge : selectedGraphPath.getEdgeList()) {
modelNames.add(selectedGraphPath.getGraph().getEdgeSource(edge));
modelNames.add(selectedGraphPath.getGraph().getEdgeTarget(edge));
}
return modelNames.stream().map(m -> ontology.getModelMap().get(m))
.collect(Collectors.toSet());
}
private boolean isGraphPathContainsAll(GraphPath<String, DefaultEdge> graphPath,
Set<String> vertex) {
Set<String> allVertex = Sets.newHashSet();
for (DefaultEdge edge : graphPath.getEdgeList()) {
allVertex.add(graphPath.getGraph().getEdgeSource(edge));
allVertex.add(graphPath.getGraph().getEdgeTarget(edge));
}
Collection<String> intersect =
org.apache.commons.collections.CollectionUtils.intersection(vertex, allVertex);
return intersect.size() == vertex.size() ? true : false;
}
private Graph<String, DefaultEdge> buildGraph(List<JoinRelation> joinRelations) {
Graph<String, DefaultEdge> directedGraph = new DefaultUndirectedGraph<>(DefaultEdge.class);
for (JoinRelation joinRelation : joinRelations) {
directedGraph.addVertex(joinRelation.getLeft());
directedGraph.addVertex(joinRelation.getRight());
directedGraph.addEdge(joinRelation.getLeft(), joinRelation.getRight());
}
return directedGraph;
}
private SqlNode optimizeParseNode(SqlNode parserNode, EngineType engineType)
throws SqlParseException {
if (Objects.isNull(schema.getRuntimeOptions())
|| Objects.isNull(schema.getRuntimeOptions().getEnableOptimize())
|| !schema.getRuntimeOptions().getEnableOptimize()) {
@@ -62,14 +135,10 @@ public class SqlBuilder {
}
SqlNode optimizeNode = null;
try {
SqlNode sqlNode = SqlParser.create(SemanticNode.getSql(parserNode, engineType),
Configuration.getParserConfig(engineType)).parseStmt();
if (Objects.nonNull(sqlNode)) {
optimizeNode = SemanticNode.optimize(scope, schema, sqlNode, engineType);
}
} catch (Exception e) {
log.error("optimize error {}", e);
SqlNode sqlNode = SqlParser.create(SemanticNode.getSql(parserNode, engineType),
Configuration.getParserConfig(engineType)).parseStmt();
if (Objects.nonNull(sqlNode)) {
optimizeNode = SemanticNode.optimize(scope, schema, sqlNode, engineType);
}
if (Objects.nonNull(optimizeNode)) {
@@ -79,7 +148,7 @@ public class SqlBuilder {
return parserNode;
}
private TableView render(OntologyQuery ontologyQuery, List<ModelResp> dataModels,
private TableView render(OntologyQuery ontologyQuery, Set<ModelResp> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema) throws Exception {
SqlNode left = null;
TableView leftTable = null;
@@ -88,8 +157,7 @@ public class SqlBuilder {
Map<String, String> beforeModels = new HashMap<>();
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
for (int i = 0; i < dataModels.size(); i++) {
final ModelResp dataModel = dataModels.get(i);
for (ModelResp dataModel : dataModels) {
final Set<DimSchemaResp> queryDimensions =
ontologyQuery.getDimensionsByModel(dataModel.getName());
final Set<MetricSchemaResp> queryMetrics =
@@ -141,7 +209,8 @@ public class SqlBuilder {
SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
JoinRelation matchJoinRelation = getMatchJoinRelation(before, rightTable, schema);
SqlNode joinRelationCondition;
if (!CollectionUtils.isEmpty(matchJoinRelation.getJoinCondition())) {
if (!org.apache.commons.collections.CollectionUtils
.isEmpty(matchJoinRelation.getJoinCondition())) {
sqlLiteral = SemanticNode.getJoinSqlLiteral(matchJoinRelation.getJoinType());
joinRelationCondition = getCondition(matchJoinRelation, scope, engineType);
condition = joinRelationCondition;
@@ -170,12 +239,19 @@ public class SqlBuilder {
} else if (joinRelation.getLeft()
.equalsIgnoreCase(tableView.getDataModel().getName())
&& before.containsKey(joinRelation.getRight())) {
matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
List<Triple<String, String, String>> candidateJoinCon = joinRelation
.getJoinCondition().stream()
.map(r -> Triple.of(
before.get(joinRelation.getRight()) + "." + r.getRight(),
r.getMiddle(), tableView.getAlias() + "." + r.getLeft()))
.collect(Collectors.toList()));
matchJoinRelation.setJoinType(joinRelation.getJoinType());
.collect(Collectors.toList());
// added by jerryjzhang on 20250214
// use the one with the most conditions to join left and right tables
if (matchJoinRelation.getJoinCondition() == null || candidateJoinCon
.size() > matchJoinRelation.getJoinCondition().size()) {
matchJoinRelation.setJoinCondition(candidateJoinCon);
matchJoinRelation.setJoinType(joinRelation.getJoinType());
}
}
}
}

View File

@@ -40,11 +40,24 @@ public class JdbcDataSourceUtils {
log.error(e.toString(), e);
return false;
}
try (Connection con = DriverManager.getConnection(database.getUrl(), database.getUsername(),
database.passwordDecrypt())) {
return con != null;
} catch (SQLException e) {
log.error(e.toString(), e);
// presto/trino ssl=false connection need password
if (database.getUrl().startsWith("jdbc:presto")
|| database.getUrl().startsWith("jdbc:trino")) {
if (database.getUrl().toLowerCase().contains("ssl=false")) {
try (Connection con = DriverManager.getConnection(database.getUrl(),
database.getUsername(), null)) {
return con != null;
} catch (SQLException e) {
log.error(e.toString(), e);
}
}
} else {
try (Connection con = DriverManager.getConnection(database.getUrl(),
database.getUsername(), database.passwordDecrypt())) {
return con != null;
} catch (SQLException e) {
log.error(e.toString(), e);
}
}
return false;

View File

@@ -73,30 +73,46 @@ public class SqlGenerateUtils {
public String getSelect(StructQuery structQuery) {
String aggStr = structQuery.getAggregators().stream().map(this::getSelectField)
.collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQuery.getGroups()) + "," + aggStr;
String result = String.join(",", structQuery.getGroups());
if (StringUtils.isNotBlank(aggStr)) {
if (!CollectionUtils.isEmpty(structQuery.getGroups())) {
result = String.join(",", structQuery.getGroups()) + "," + aggStr;
} else {
result = aggStr;
}
}
return result;
}
public String getSelect(StructQuery structQuery, Map<String, String> deriveMetrics) {
String aggStr = structQuery.getAggregators().stream()
.map(a -> getSelectField(a, deriveMetrics)).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQuery.getGroups()) + "," + aggStr;
String result = String.join(",", structQuery.getGroups());
if (StringUtils.isNotBlank(aggStr)) {
if (!CollectionUtils.isEmpty(structQuery.getGroups())) {
result = String.join(",", structQuery.getGroups()) + "," + aggStr;
} else {
result = aggStr;
}
}
return result;
}
public String getSelectField(final Aggregator agg) {
if (AggOperatorEnum.COUNT_DISTINCT.equals(agg.getFunc())) {
return "count(distinct " + agg.getColumn() + " ) AS " + agg.getColumn() + " ";
return "count(distinct " + agg.getColumn() + " ) ";
}
if (CollectionUtils.isEmpty(agg.getArgs())) {
return agg.getFunc() + "( " + agg.getColumn() + " ) AS " + agg.getColumn() + " ";
return agg.getFunc() + "( " + agg.getColumn() + " ) ";
}
return agg.getFunc() + "( "
+ agg.getArgs().stream()
.map(arg -> arg.equals(agg.getColumn()) ? arg
: (StringUtils.isNumeric(arg) ? arg : ("'" + arg + "'")))
.collect(Collectors.joining(","))
+ " ) AS " + agg.getColumn() + " ";
+ " ) ";
}
public String getSelectField(final Aggregator agg, Map<String, String> deriveMetrics) {
@@ -140,7 +156,10 @@ public class SqlGenerateUtils {
public String generateWhere(StructQuery structQuery, ItemDateResp itemDateResp) {
String whereClauseFromFilter =
sqlFilterUtils.getWhereClause(structQuery.getDimensionFilters());
String whereFromDate = getDateWhereClause(structQuery.getDateInfo(), itemDateResp);
String whereFromDate = "";
if (structQuery.getDateInfo() != null) {
whereFromDate = getDateWhereClause(structQuery.getDateInfo(), itemDateResp);
}
String mergedWhere =
mergeDateWhereClause(structQuery, whereClauseFromFilter, whereFromDate);
if (StringUtils.isNotBlank(mergedWhere)) {

View File

@@ -7,6 +7,7 @@ import com.tencent.supersonic.auth.api.authorization.request.QueryAuthResReq;
import com.tencent.supersonic.auth.api.authorization.response.AuthorizedResourceResp;
import com.tencent.supersonic.auth.api.authorization.service.AuthService;
import com.tencent.supersonic.common.jsqlparser.SqlAddHelper;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.pojo.Filter;
import com.tencent.supersonic.common.pojo.QueryAuthorization;
import com.tencent.supersonic.common.pojo.User;
@@ -73,6 +74,15 @@ public class S2DataPermissionAspect {
SemanticQueryReq queryReq = null;
if (objects[0] instanceof SemanticQueryReq) {
queryReq = (SemanticQueryReq) objects[0];
if (queryReq instanceof QuerySqlReq) {
QuerySqlReq sqlReq = (QuerySqlReq) queryReq;
if (sqlReq.getDataSetName() != null) {
String escapedTable = SqlReplaceHelper.escapeTableName(sqlReq.getDataSetName());
sqlReq.setSql(sqlReq.getSql().replaceAll(
String.format(" %s ", sqlReq.getDataSetName()),
String.format(" %s ", escapedTable)));
}
}
}
if (queryReq == null) {
throw new InvalidArgumentException("queryReq is not Invalid");

View File

@@ -65,14 +65,15 @@ public class S2ChatLayerService implements ChatLayerService {
@Override
public ParseResp parse(QueryNLReq queryNLReq) {
ParseResp parseResult = new ParseResp(queryNLReq.getQueryText());
ParseResp parseResp = new ParseResp(queryNLReq.getQueryText());
ChatQueryContext queryCtx = buildChatQueryContext(queryNLReq);
queryCtx.setParseResp(parseResp);
if (queryCtx.getMapInfo().isEmpty()) {
chatWorkflowEngine.start(ChatWorkflowState.MAPPING, queryCtx, parseResult);
chatWorkflowEngine.start(ChatWorkflowState.MAPPING, queryCtx);
} else {
chatWorkflowEngine.start(ChatWorkflowState.PARSING, queryCtx, parseResult);
chatWorkflowEngine.start(ChatWorkflowState.PARSING, queryCtx);
}
return parseResult;
return parseResp;
}
public void correct(QuerySqlReq querySqlReq, User user) {

View File

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.server.facade.service.impl;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.QueryColumn;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.TaskStatusEnum;
@@ -23,6 +24,7 @@ import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQuery;
import com.tencent.supersonic.headless.core.translator.SemanticTranslator;
import com.tencent.supersonic.headless.core.translator.TranslatorConfig;
import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.server.annotation.S2DataPermission;
import com.tencent.supersonic.headless.server.facade.service.SemanticLayerService;
@@ -58,6 +60,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
private final KnowledgeBaseService knowledgeBaseService;
private final MetricService metricService;
private final DimensionService dimensionService;
private final TranslatorConfig translatorConfig;
private final QueryCache queryCache = ComponentFactory.getQueryCache();
private final List<QueryExecutor> queryExecutors = ComponentFactory.getQueryExecutors();
@@ -66,7 +69,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
SchemaService schemaService, SemanticTranslator semanticTranslator,
MetricDrillDownChecker metricDrillDownChecker,
KnowledgeBaseService knowledgeBaseService, MetricService metricService,
DimensionService dimensionService) {
DimensionService dimensionService, TranslatorConfig translatorConfig) {
this.statUtils = statUtils;
this.queryUtils = queryUtils;
this.semanticSchemaManager = semanticSchemaManager;
@@ -77,6 +80,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
this.knowledgeBaseService = knowledgeBaseService;
this.metricService = metricService;
this.dimensionService = dimensionService;
this.translatorConfig = translatorConfig;
}
public DataSetSchema getDataSetSchema(Long id) {
@@ -299,6 +303,8 @@ public class S2SemanticLayerService implements SemanticLayerService {
QueryStatement queryStatement = new QueryStatement();
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setLimit(Integer.parseInt(
translatorConfig.getParameterValue(TranslatorConfig.TRANSLATOR_RESULT_LIMIT)));
queryStatement.setDataSetId(queryReq.getDataSetId());
queryStatement.setDataSetName(queryReq.getDataSetName());
queryStatement.setSemanticSchema(semanticSchemaResp);
@@ -323,6 +329,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
DataSetResp dataSetResp = dataSetService.getDataSet(querySqlReq.getDataSetId());
queryStatement.setDataSetId(dataSetResp.getId());
queryStatement.setDataSetName(dataSetResp.getName());
sqlQuery.setTable(Constants.TABLE_PREFIX + dataSetResp.getId());
}
return queryStatement;
}

View File

@@ -16,6 +16,10 @@ public class DbParameterFactory {
parametersBuilder.put(EngineType.MYSQL.getName(), new MysqlParametersBuilder());
parametersBuilder.put(EngineType.POSTGRESQL.getName(), new PostgresqlParametersBuilder());
parametersBuilder.put(EngineType.HANADB.getName(), new HanadbParametersBuilder());
parametersBuilder.put(EngineType.STARROCKS.getName(), new StarrocksParametersBuilder());
parametersBuilder.put(EngineType.KYUUBI.getName(), new KyuubiParametersBuilder());
parametersBuilder.put(EngineType.PRESTO.getName(), new PrestoParametersBuilder());
parametersBuilder.put(EngineType.TRINO.getName(), new TrinoParametersBuilder());
parametersBuilder.put(EngineType.OTHER.getName(), new OtherParametersBuilder());
}

View File

@@ -29,6 +29,7 @@ public class DefaultParametersBuilder implements DbParametersBuilder {
password.setComment("密码");
password.setName("password");
password.setPlaceholder("请输入密码");
password.setRequire(false);
databaseParameters.add(password);
return databaseParameters;

View File

@@ -0,0 +1,16 @@
package com.tencent.supersonic.headless.server.pojo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

/**
 * Connection-parameter builder for Kyuubi. Kyuubi needs no parameters beyond the
 * defaults, so everything is inherited from {@link DefaultParametersBuilder}.
 * (The previous build() override only delegated to super and has been removed.)
 */
@Service
@Slf4j
public class KyuubiParametersBuilder extends DefaultParametersBuilder {
}

View File

@@ -0,0 +1,16 @@
package com.tencent.supersonic.headless.server.pojo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

/**
 * Connection-parameter builder for Presto. Presto needs no parameters beyond the
 * defaults, so everything is inherited from {@link DefaultParametersBuilder}.
 * (The previous build() override only delegated to super and has been removed.)
 */
@Service
@Slf4j
public class PrestoParametersBuilder extends DefaultParametersBuilder {
}

View File

@@ -0,0 +1,16 @@
package com.tencent.supersonic.headless.server.pojo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

/**
 * Connection-parameter builder for StarRocks. StarRocks needs no parameters beyond the
 * defaults, so everything is inherited from {@link DefaultParametersBuilder}.
 * (The previous build() override only delegated to super and has been removed.)
 */
@Service
@Slf4j
public class StarrocksParametersBuilder extends DefaultParametersBuilder {
}

View File

@@ -0,0 +1,16 @@
package com.tencent.supersonic.headless.server.pojo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

/**
 * Connection-parameter builder for Trino. Trino needs no parameters beyond the
 * defaults, so everything is inherited from {@link DefaultParametersBuilder}.
 * (The previous build() override only delegated to super and has been removed.)
 */
@Service
@Slf4j
public class TrinoParametersBuilder extends DefaultParametersBuilder {
}

View File

@@ -76,22 +76,30 @@ public class DatabaseController {
return databaseService.executeSql(sqlExecuteReq, user);
}
@RequestMapping("/getCatalogs")
public List<String> getCatalogs(@RequestParam("id") Long databaseId) throws SQLException {
return databaseService.getCatalogs(databaseId);
}
@RequestMapping("/getDbNames")
public List<String> getDbNames(@RequestParam("id") Long databaseId) throws SQLException {
return databaseService.getDbNames(databaseId);
public List<String> getDbNames(@RequestParam("id") Long databaseId,
@RequestParam(value = "catalog", required = false) String catalog) throws SQLException {
return databaseService.getDbNames(databaseId, catalog);
}
@RequestMapping("/getTables")
public List<String> getTables(@RequestParam("databaseId") Long databaseId,
@RequestParam(value = "catalog", required = false) String catalog,
@RequestParam("db") String db) throws SQLException {
return databaseService.getTables(databaseId, db);
return databaseService.getTables(databaseId, catalog, db);
}
@RequestMapping("/getColumnsByName")
public List<DBColumn> getColumnsByName(@RequestParam("databaseId") Long databaseId,
@RequestParam(name = "catalog", required = false) String catalog,
@RequestParam("db") String db, @RequestParam("table") String table)
throws SQLException {
return databaseService.getColumns(databaseId, db, table);
return databaseService.getColumns(databaseId, catalog, db, table);
}
@PostMapping("/listColumnsBySql")

View File

@@ -36,13 +36,15 @@ public interface DatabaseService {
void deleteDatabase(Long databaseId);
List<String> getDbNames(Long id) throws SQLException;
List<String> getCatalogs(Long id) throws SQLException;
List<String> getTables(Long id, String db) throws SQLException;
List<String> getDbNames(Long id, String catalog) throws SQLException;
List<String> getTables(Long id, String catalog, String db) throws SQLException;
Map<String, List<DBColumn>> getDbColumns(ModelBuildReq modelBuildReq) throws SQLException;
List<DBColumn> getColumns(Long id, String db, String table) throws SQLException;
List<DBColumn> getColumns(Long id, String catalog, String db, String table) throws SQLException;
List<DBColumn> getColumns(Long id, String sql) throws SQLException;
}

View File

@@ -200,17 +200,24 @@ public class DatabaseServiceImpl extends ServiceImpl<DatabaseDOMapper, DatabaseD
}
@Override
public List<String> getDbNames(Long id) throws SQLException {
public List<String> getCatalogs(Long id) throws SQLException {
DatabaseResp databaseResp = getDatabase(id);
DbAdaptor dbAdaptor = DbAdaptorFactory.getEngineAdaptor(databaseResp.getType());
return dbAdaptor.getDBs(DatabaseConverter.getConnectInfo(databaseResp));
return dbAdaptor.getCatalogs(DatabaseConverter.getConnectInfo(databaseResp));
}
@Override
public List<String> getTables(Long id, String db) throws SQLException {
public List<String> getDbNames(Long id, String catalog) throws SQLException {
DatabaseResp databaseResp = getDatabase(id);
DbAdaptor dbAdaptor = DbAdaptorFactory.getEngineAdaptor(databaseResp.getType());
return dbAdaptor.getTables(DatabaseConverter.getConnectInfo(databaseResp), db);
return dbAdaptor.getDBs(DatabaseConverter.getConnectInfo(databaseResp), catalog);
}
@Override
public List<String> getTables(Long id, String catalog, String db) throws SQLException {
DatabaseResp databaseResp = getDatabase(id);
DbAdaptor dbAdaptor = DbAdaptorFactory.getEngineAdaptor(databaseResp.getType());
return dbAdaptor.getTables(DatabaseConverter.getConnectInfo(databaseResp), catalog, db);
}
@Override
@@ -226,8 +233,8 @@ public class DatabaseServiceImpl extends ServiceImpl<DatabaseDOMapper, DatabaseD
dbColumnMap.put(modelBuildReq.getSql(), columns);
} else {
for (String table : modelBuildReq.getTables()) {
List<DBColumn> columns =
getColumns(modelBuildReq.getDatabaseId(), modelBuildReq.getDb(), table);
List<DBColumn> columns = getColumns(modelBuildReq.getDatabaseId(),
modelBuildReq.getCatalog(), modelBuildReq.getDb(), table);
dbColumnMap.put(table, columns);
}
}
@@ -235,15 +242,17 @@ public class DatabaseServiceImpl extends ServiceImpl<DatabaseDOMapper, DatabaseD
}
@Override
public List<DBColumn> getColumns(Long id, String db, String table) throws SQLException {
public List<DBColumn> getColumns(Long id, String catalog, String db, String table)
throws SQLException {
DatabaseResp databaseResp = getDatabase(id);
return getColumns(databaseResp, db, table);
return getColumns(databaseResp, catalog, db, table);
}
public List<DBColumn> getColumns(DatabaseResp databaseResp, String db, String table)
throws SQLException {
public List<DBColumn> getColumns(DatabaseResp databaseResp, String catalog, String db,
String table) throws SQLException {
DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(databaseResp.getType());
return engineAdaptor.getColumns(DatabaseConverter.getConnectInfo(databaseResp), db, table);
return engineAdaptor.getColumns(DatabaseConverter.getConnectInfo(databaseResp), catalog, db,
table);
}
@Override

View File

@@ -104,11 +104,11 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
} else {
MetricResp metricRespByBizName = bizNameMap.get(metric.getBizName());
MetricResp metricRespByName = nameMap.get(metric.getName());
if (null != metricRespByBizName && isChange(metric, metricRespByBizName)) {
if (null != metricRespByBizName) {
metric.setId(metricRespByBizName.getId());
this.updateMetric(metric, user);
} else {
if (null != metricRespByName && isChange(metric, metricRespByName)) {
if (null != metricRespByName) {
metric.setId(metricRespByName.getId());
this.updateMetric(metric, user);
}
@@ -819,7 +819,7 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
return modelResps.stream().map(ModelResp::getId).collect(Collectors.toSet());
}
private boolean isChange(MetricReq metricReq, MetricResp metricResp) {
private boolean isNameChange(MetricReq metricReq, MetricResp metricResp) {
boolean isNameChange = !metricReq.getName().equals(metricResp.getName());
return isNameChange;
}

View File

@@ -61,15 +61,7 @@ import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
@@ -150,8 +142,11 @@ public class ModelServiceImpl implements ModelService {
@Override
@Transactional
public ModelResp updateModel(ModelReq modelReq, User user) throws Exception {
// checkParams(modelReq);
checkRelations(modelReq);
// Comment out below checks for now, they seem unnecessary and
// lead to unexpected exception in updating model
/*
* checkParams(modelReq); checkRelations(modelReq);
*/
ModelDO modelDO = modelRepository.getModelById(modelReq.getId());
ModelConverter.convert(modelDO, modelReq, user);
modelRepository.updateModel(modelDO);
@@ -372,7 +367,11 @@ public class ModelServiceImpl implements ModelService {
metaFilter.setModelIds(Lists.newArrayList(modelId));
List<MetricResp> metricResps = metricService.getMetrics(metaFilter);
List<DimensionResp> dimensionResps = dimensionService.getDimensions(metaFilter);
if (!CollectionUtils.isEmpty(metricResps) || !CollectionUtils.isEmpty(dimensionResps)) {
boolean validMetric = metricResps.stream().anyMatch(
metricResp -> Objects.equals(metricResp.getStatus(), StatusEnum.ONLINE.getCode()));
boolean validDimension = dimensionResps.stream().anyMatch(dimensionResp -> Objects
.equals(dimensionResp.getStatus(), StatusEnum.ONLINE.getCode()));
if (validMetric || validDimension) {
throw new RuntimeException("存在基于该模型创建的指标和维度, 暂不能删除, 请确认");
}
}

View File

@@ -197,6 +197,7 @@ public class SchemaServiceImpl implements SchemaService {
DatabaseResp databaseResp = databaseService
.getDatabase(dataSetSchemaResp.getModelResps().get(0).getDatabaseId());
dataSetSchemaResp.setDatabaseType(databaseResp.getType());
dataSetSchemaResp.setDatabaseVersion(databaseResp.getVersion());
}
dataSetSchemaResps.add(dataSetSchemaResp);
}

View File

@@ -34,8 +34,8 @@ public class ChatWorkflowEngine {
private final List<SemanticCorrector> semanticCorrectors =
CoreComponentFactory.getSemanticCorrectors();
public void start(ChatWorkflowState initialState, ChatQueryContext queryCtx,
ParseResp parseResult) {
public void start(ChatWorkflowState initialState, ChatQueryContext queryCtx) {
ParseResp parseResult = queryCtx.getParseResp();
queryCtx.setChatWorkflowState(initialState);
while (queryCtx.getChatWorkflowState() != ChatWorkflowState.FINISHED) {
switch (queryCtx.getChatWorkflowState()) {
@@ -61,14 +61,14 @@ public class ChatWorkflowEngine {
.map(SemanticQuery::getParseInfo).collect(Collectors.toList());
parseResult.setSelectedParses(parseInfos);
if (queryCtx.needSQL()) {
queryCtx.setChatWorkflowState(ChatWorkflowState.CORRECTING);
queryCtx.setChatWorkflowState(ChatWorkflowState.S2SQL_CORRECTING);
} else {
parseResult.setState(ParseResp.ParseState.COMPLETED);
queryCtx.setChatWorkflowState(ChatWorkflowState.FINISHED);
}
}
break;
case CORRECTING:
case S2SQL_CORRECTING:
performCorrecting(queryCtx);
queryCtx.setChatWorkflowState(ChatWorkflowState.TRANSLATING);
break;
@@ -109,7 +109,8 @@ public class ChatWorkflowEngine {
for (SemanticQuery semanticQuery : candidateQueries) {
for (SemanticCorrector corrector : semanticCorrectors) {
corrector.correct(queryCtx, semanticQuery.getParseInfo());
if (!ChatWorkflowState.CORRECTING.equals(queryCtx.getChatWorkflowState())) {
if (!ChatWorkflowState.S2SQL_CORRECTING
.equals(queryCtx.getChatWorkflowState())) {
break;
}
}

View File

@@ -24,6 +24,7 @@ public class DataSetSchemaBuilder {
.bizName(resp.getBizName()).type(SchemaElementType.DATASET).build();
dataSetSchema.setDataSet(dataSet);
dataSetSchema.setDatabaseType(resp.getDatabaseType());
dataSetSchema.setDatabaseVersion(resp.getDatabaseVersion());
Set<SchemaElement> metrics = getMetrics(resp);
dataSetSchema.getMetrics().addAll(metrics);

Some files were not shown because too many files have changed in this diff Show More