diff --git a/.asf.yaml b/.asf.yaml index 8a85ba92260..c9582c69ade 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -57,7 +57,13 @@ github: required_pull_request_reviews: dismiss_stale_reviews: true required_approving_review_count: 2 - dev-1.2.0: + dev-1.3.1: + required_status_checks: + strict: true + required_pull_request_reviews: + dismiss_stale_reviews: true + required_approving_review_count: 1 + dev-1.3.2: required_status_checks: strict: true required_pull_request_reviews: diff --git a/.github/workflows/auto-format-pr.yaml b/.github/workflows/auto-format-pr.yaml index dd79a999506..301d91c76b1 100644 --- a/.github/workflows/auto-format-pr.yaml +++ b/.github/workflows/auto-format-pr.yaml @@ -44,8 +44,7 @@ jobs: distribution: 'adopt' - name: Code Format Apply - run: | - ./mvnw -N install + run: ./mvnw spotless:apply - name: Create Pull Request diff --git a/.github/workflows/build-backend.yml b/.github/workflows/build-backend.yml index 8b694ed15b6..33e311eec2b 100644 --- a/.github/workflows/build-backend.yml +++ b/.github/workflows/build-backend.yml @@ -34,11 +34,9 @@ jobs: with: distribution: 'adopt' java-version: 8 - - name: Build backend by maven - run: | - ./mvnw -N install - ./mvnw clean package + run: + ./mvnw clean package - name: Upload coverage to Codecov uses: codecov/codecov-action@v3.0.0 # with: diff --git a/.github/workflows/check-code-format.yml b/.github/workflows/check-code-format.yml index 21ae2ec4dff..0323eb9803d 100644 --- a/.github/workflows/check-code-format.yml +++ b/.github/workflows/check-code-format.yml @@ -31,6 +31,5 @@ jobs: java-version: '8' distribution: 'adopt' - name: Code format check - run: | - ./mvnw -N install + run: ./mvnw spotless:check diff --git a/.github/workflows/check-license.yml b/.github/workflows/check-license.yml index c2d1d6c4a20..3c79607dc32 100644 --- a/.github/workflows/check-license.yml +++ b/.github/workflows/check-license.yml @@ -30,9 +30,6 @@ jobs: with: java-version: '8' distribution: 'adopt' - - name: mvn -N install - run: - mvn -N install - name: License check with Maven run: | rat_file=`mvn apache-rat:check | { grep -oe "\\S\\+/rat.txt" || true; }` diff --git a/.github/workflows/check-third-party-dependencies.yml b/.github/workflows/check-third-party-dependencies.yml index 25fb0bd7f52..1b9339b14a2 100644 --- a/.github/workflows/check-third-party-dependencies.yml +++ b/.github/workflows/check-third-party-dependencies.yml @@ -33,16 +33,13 @@ jobs: with: java-version: '8' distribution: 'adopt' - - name: mvn -N install - run: - mvn -N install - name: mvn install run: #pom.xml also introduce linkis related jar,so run mvn install in first time - mvn install + ./mvnw install -Dmaven.test.skip=true -Dmaven.javadoc.skip=true - name: mvn dependency:copy-dependencies run: - mvn dependency:copy-dependencies -DincludeScope=runtime -DoutputDirectory=${{ github.workspace }}/current_dependencies + ./mvnw dependency:copy-dependencies -DincludeScope=runtime -DoutputDirectory=${{ github.workspace }}/current_dependencies - name: generate current_dependencies.txt run: | ls ${{ github.workspace }}/current_dependencies |egrep -v "^linkis" |sort > ~/current_dependencies.txt diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index f1501d378fa..f5be6ff8abf 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -59,9 +59,8 @@ jobs: # - name: Autobuild # uses: github/codeql-action/autobuild@v2 - name: Build - run: | - ./mvnw -N install - ./mvnw clean package + run: + ./mvnw clean 
package -Dmaven.test.skip=true - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v2 \ No newline at end of file diff --git a/.github/workflows/publish-docker.yaml b/.github/workflows/publish-docker.yaml index c1701f4814f..b80806e1747 100644 --- a/.github/workflows/publish-docker.yaml +++ b/.github/workflows/publish-docker.yaml @@ -61,12 +61,10 @@ jobs: sed -i "/VUE_APP_MN_CONFIG_PREFIX/d" .env npm install npm run build + - name: Build backend by maven run: | - ./mvnw -N install - - name: Build backend by maven - run: | - ./mvnw install -Pdocker -Dmaven.javadoc.skip=true -Dmaven.test.skip=true -Dlinkis.build.web=true -Dimage.build.type=release + ./mvnw install -Pdocker -Dmaven.javadoc.skip=true -Dmaven.test.skip=true -Dlinkis.build.web=true -Dlinkis.build.ldh=true - name: Log in to the Container registry uses: docker/login-action@v1.10.0 with: @@ -80,5 +78,18 @@ docker images docker tag linkis:${{ env.LINKIS_VERSION }} ${{ env.HUB }}/linkis:${{ env.DOCKER_VERSION }} docker push ${{ env.HUB }}/linkis:${{ env.DOCKER_VERSION }} + + docker tag linkis:${{ env.LINKIS_VERSION }} ${{ env.HUB }}/linkis:latest + docker push ${{ env.HUB }}/linkis:latest + docker tag linkis-web:${{ env.LINKIS_VERSION }} ${{ env.HUB }}/linkis-web:${{ env.DOCKER_VERSION }} docker push ${{ env.HUB }}/linkis-web:${{ env.DOCKER_VERSION }} + + docker tag linkis-web:${{ env.LINKIS_VERSION }} ${{ env.HUB }}/linkis-web:latest + docker push ${{ env.HUB }}/linkis-web:latest + + docker tag linkis-ldh:${{ env.LINKIS_VERSION }} ${{ env.HUB }}/linkis-ldh:${{ env.DOCKER_VERSION }} + docker push ${{ env.HUB }}/linkis-ldh:${{ env.DOCKER_VERSION }} + + docker tag linkis-ldh:${{ env.LINKIS_VERSION }} ${{ env.HUB }}/linkis-ldh:latest + docker push ${{ env.HUB }}/linkis-ldh:latest diff --git a/.github/workflows/publish-snapshot.yml b/.github/workflows/publish-snapshot.yml index 7ce11e2312f..dcf4ee9c262 100644 --- a/.github/workflows/publish-snapshot.yml +++ b/.github/workflows/publish-snapshot.yml @@ -42,18 +42,16 @@ jobs: distribution: 'adopt' java-version: 8 - - name: Get Version - run: | - BRANCE= ${{ matrix.branch }} - echo "::set-output name=version::${BRANCE#*-}-SNAPSHOT" - id: get_version + - name: Get Version + run: | + BRANCH=${{ matrix.branch }} + echo "::set-output name=version::${BRANCH#*-}-SNAPSHOT" + id: get_version - name: Publish snapshot - ${{ matrix.branch }} env: ASF_USERNAME: ${{ secrets.NEXUS_USER }} ASF_PASSWORD: ${{ secrets.NEXUS_PW }} run: | - ./mvnw -N install ./mvnw versions:set -DnewVersion=${{steps.get_version.outputs.version}} -DgenerateBackupPoms=false - ./mvnw -N install - ./mvnw deploy -s ./.github/asf-settings.xml -Drevision={{steps.get_version.outputs.version}} -DskipTests -Dmaven.javadoc.skip=true -DretryFailedDeploymentCount=10 \ No newline at end of file + ./mvnw deploy -s ./.github/asf-settings.xml -Dmaven.test.skip=true -Dmaven.javadoc.skip=true -DretryFailedDeploymentCount=10 \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index df448ad8503..f5ed5970d0e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -23,6 +23,7 @@ Helping answering the questions in the Linkis community is a very valuable way t You can find linkis documentations at [linkis-Website](https://linkis.apache.org/docs/latest/introduction), and the supplement of the document is also crucial to the development of Linkis.
### 1.5 Other + Including participating in and helping to organize community exchanges, community operation activities, etc., and other activities that can help the Linkis project and the community. ## 2. How to Contribution @@ -30,16 +31,18 @@ ### 2.1 Branch structure The Linkis source code may have some temporary branches, but only the following three branches are really meaningful: + - master: The source code of the latest stable release, and occasionally several hotfix submissions; - release-*: stable release version; - dev-*: main development branch; #### 2.1.1 Concept -- Upstream repository: https://github.com/apache/incubator-linkis The apache repository of linkis is called Upstream repository in the text -- Fork repository: From https://github.com/apache/incubator-linkis fork to your own personal repository called Fork repository +- Upstream repository: the apache repository of linkis, referred to as the Upstream repository in this document +- Fork repository: your own personal repository created by forking the Upstream repository, referred to as the Fork repository #### 2.1.2 Synchronize Upstream Repository + > Synchronize the latest code of the Upstream repository branch to your own Fork repository - Step1 Enter the user project page and select the branch to be updated ![update-code](https://user-images.githubusercontent.com/7869972/176622158-52da5a80-6d6a-4f06-a099-ff65887d002c.png) #### 2.1.3 Synchronize New Branch + >Synchronize the new branch of the Upstream repository to your own Fork repository Scenario: There is a new branch in the Upstream warehouse, but the forked library does not have this branch (you can choose to delete and re-fork, but the changes that have not been merged to the original warehouse will be lost) @@ -58,25 +62,33 @@ Operate in your own clone's local project ```shell script git remote add apache git@github.com:apache/incubator-linkis.git ``` + - Step2 Pull the apache image information to the local ```shell script git fetch apache ``` + - Step3 Create a local branch based on the new branch that needs to be synced ```shell script git checkout -b dev-1.1.4 apache/dev-1.1.4 ``` + - Step4 Push the local branch to your own warehouse.
If your own warehouse does not have the dev-1.1.4 branch, the dev-1.1.4 branch will be created + ```shell script git push origin dev-1.1.4:dev-1.1.4 ``` + - Step5 Delete the upstream branch + ```shell script git remote remove apache ``` + - Step6 Update the branch + ```shell script git pull ``` @@ -85,23 +97,25 @@ git pull - Step1 Confirm the base branch of the current development (usually the current version in progress, such as the version 1.1.0 currently under development by the community, then the branch is dev-1.1.0, if you are not sure, you can ask in the community group or at @relevant classmates in the issue) -- Step2 Synchronize the latest code of the Upstream warehouse branch to your own Fork warehouse branch, see the guide [2.1.2 Synchronize Upstream Repository] +- Step2 Synchronize the latest code of the Upstream warehouse branch to your own Fork warehouse branch, see the guide [2.1.2 Synchronize Upstream Repository] - Step3 Based on the development branch, pull the new fix/feature branch (do not modify it directly on the original branch, if the subsequent PR is merged in the squash method, the submitted commit records will be merged into one) + ```shell script git checkout -b dev-1.1.4-fix dev-1.1.4 git push origin dev-1.1.4-fix:dev-1.1.4-fix ``` + - Step4 Develop -- Step5 Submit pr (if it is in progress and the development has not been completely completed, please add the WIP logo to the pr title, such as `[WIP] Dev 1.1.1 Add junit test code for [linkis-common] `; associate the corresponding issue, etc.) +- Step5 Submit pr (if it is in progress and the development has not been completely completed, please add the WIP logo to the pr title, such as `[WIP] Dev 1.1.1 Add junit test code for [linkis-common]`; associate the corresponding issue, etc.) - Step6 Waiting to be merged - Step7 Delete the fix/future branch (you can do this on the github page) + ```shell script git branch -d dev-1.1.4-fix git push ``` - Please note: For the dev branch of major features, in addition to the version number, the corresponding naming description will be added, such as: dev-0.10.0-flink, which refers to the flink feature development branch of 0.10.0. ### 2.2 Development Guidelines @@ -122,6 +136,7 @@ git push origin dev-fix dev-fix ``` ### 2.3 Issue submission guidelines + - If you still don’t know how to initiate a PR to an open source project, please refer to [About issues](https://docs.github.com/en/github/managing-your-work-on-github/about-issues) - Issue name, which should briefly describe your problem or suggestion in one sentence; for the international promotion of the project, please write the issue in English or both Chinese and English - For each Issue, please bring at least two labels, component and type, such as component=Computation Governance/EngineConn, type=Improvement. 
Reference: [issue #590](https://github.com/apache/incubator-linkis/issues/590) @@ -193,4 +208,4 @@ If you are the Committer of the Linkis project, and all your contributions have - You can merge PRs submitted by other Committers and contributors to the dev-** branch - Participate in determining the roadmap and development direction of the Linkis project -- Can participate in the new version release \ No newline at end of file +- Can participate in the new version release diff --git a/CONTRIBUTING_CN.md b/CONTRIBUTING_CN.md index 117c4e8f707..a728e4cccd7 100644 --- a/CONTRIBUTING_CN.md +++ b/CONTRIBUTING_CN.md @@ -1,10 +1,9 @@ # 如何参与项目贡献 -> 更多信息可以见官网[如何参与项目贡献 ](https://linkis.apache.org/community/how-to-contribute) +> 更多信息可以见官网[如何参与项目贡献](https://linkis.apache.org/community/how-to-contribute) 非常感谢贡献 Linkis 项目!在参与贡献之前,请仔细阅读以下指引。 - ## 一、贡献范畴 ### 1.1 Bug 反馈与修复 @@ -21,27 +20,28 @@ ### 1.4 文档改进 -Linkis 文档位于[Linkis 官网 ](https://linkis.apache.org/zh-CN/docs/latest/introduction/) ,文档的补充完善对于 Linkis 的发展也至关重要。 +Linkis 文档位于[Linkis 官网](https://linkis.apache.org/zh-CN/docs/latest/introduction/) ,文档的补充完善对于 Linkis 的发展也至关重要。 ### 1.5 其他 -包括参与和帮助组织社区交流、社区运营活动等,其他能够帮助 Linkis 项目和社区的活动。 +包括参与和帮助组织社区交流、社区运营活动等,其他能够帮助 Linkis 项目和社区的活动。 ## 二、贡献流程 ### 2.1 分支结构 Linkis 源码可能会产生一些临时分支,但真正有明确意义的只有以下三个分支: + - master: 最近一次稳定 release 的源码,偶尔会多几次 hotfix 提交; -- release-*: 稳定的 release 版本; +- release-*: 稳定的 release 版本; - dev-*: 主要开发分支; -#### 2.1.1 概念 +#### 2.1.1 概念 -- Upstream 仓库:https://github.com/apache/incubator-linkis linkis 的 apache 仓库文中称为 Upstream 仓库 -- Fork 仓库: 从 https://github.com/apache/incubator-linkis fork 到自己个人仓库 称为 Fork 仓库 +- Upstream 仓库: linkis 的 apache 仓库文中称为 Upstream 仓库 +- Fork 仓库: 从 fork 到自己个人仓库 称为 Fork 仓库 -#### 2.1.2 同步 Upstream 仓库分支最新代码到自己的 Fork 仓库 +#### 2.1.2 同步 Upstream 仓库分支最新代码到自己的 Fork 仓库 - step1 进入用户项目页面,选中要更新的分支 - step2 点击 code 下载按钮下方的 Fetch upstream,选择 Fetch and merge (如自己的 Fork 仓库 该分支不小心污染了,可以删除该分支后,同步 Upstream 仓库新分支到自己的 Fork 仓库 ,参见指引[同步 Upstream 仓库分支最新代码到自己的 Fork 仓库 ](#213-同步 Upstream 仓库新分支到自己的 Fork 仓库 )) @@ -58,47 +58,58 @@ Linkis 源码可能会产生一些临时分支,但真正有明确意义的只 ```shell script git remote add apache git@github.com:apache/incubator-linkis.git ``` + - step2 拉去 apache 镜像信息到本地 ```shell script git fetch apache ``` + - step3 根据需要同步的新分支来创建本地分支 ```shell script git checkout -b dev-1.1.4 apache/dev-1.1.4 ``` + - step4 把本地分支 push 到自己的仓库,如果自己的仓库没有 dev-1.1.4 分支,则会创建 dev-1.1.4 分支 + ```shell script git push origin dev-1.1.4:dev-1.1.4 ``` + - step5 删除 upstream 的分支 + ```shell script git remote remove apache ``` + - step6 更新分支 + ```shell script git pull ``` -#### 2.1.4 一个 pr 的流程 +#### 2.1.4 一个 pr 的流程 - step1 确认当前开发的基础分支(一般是当前进行的中版本,如当前社区开发中的版本 1.1.0,那么分支就是 dev-1.1.0,不确定的话可以在社区群里问下或则在 issue 中@相关同学) - step2 同步 Upstream 仓库分支最新代码到自己的 Fork 仓库 分支,参见指引 [2.1.2 同步 Upstream 仓库分支最新代码到自己的 Fork 仓库 ] - step3 基于开发分支,拉取新 fix/feature 分支 (不要直接在原分支上修改,如果后续 pr 以 squash 方式 merge 后,提交的 commit 记录会被合并成一个) + ```shell script git checkout -b dev-1.1.4-fix dev-1.1.4 git push origin dev-1.1.4-fix:dev-1.1.4-fix ``` + - step4 进行开发 -- step5 提交 pr(如果是正在进行中,开发还未完全结束,请在 pr 标题上加上 WIP 标识 如 `[WIP] Dev 1.1.1 Add junit test code for [linkis-common] ` ;关联对应的 issue 等) +- step5 提交 pr(如果是正在进行中,开发还未完全结束,请在 pr 标题上加上 WIP 标识 如 `[WIP] Dev 1.1.1 Add junit test code for [linkis-common]` ;关联对应的 issue 等) - step6 等待被合并 -- step7 删除 fix/future 分支 (可以在 github 页面上进行操作) +- step7 删除 fix/future 分支 (可以在 github 页面上进行操作) + ```shell script -git branch -d dev-1.1.4-fix -git push +git branch -d dev-1.1.4-fix +git push ``` 请注意:大特性的 dev 分支,在命名时除了版本号,还会加上相应的命名说明,如:dev-0.10.0-flink,指 0.10.0 的 
flink 特性开发分支。 @@ -128,7 +139,7 @@ git push origin dev-fix dev-fix - 如果您还不知道怎样向开源项目发起 PR,请参考[About pull requests](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests) - 无论是 Bug 修复,还是新功能开发,请将 PR 提交到 dev-* 分支 -- PR 和提交名称遵循 `(): ` 原则,详情可以参考[Commit message 和 Change log 编写指南 ](https://linkis.apache.org/zh-CN/community/development-specification/commit-message) +- PR 和提交名称遵循 `(): ` 原则,详情可以参考[Commit message 和 Change log 编写指南](https://linkis.apache.org/zh-CN/community/development-specification/commit-message) - 如果 PR 中包含新功能,理应将文档更新包含在本次 PR 中 - 如果本次 PR 尚未准备好合并,请在名称头部加上 [WIP] 前缀(WIP = work-in-progress) - 所有提交到 dev-* 分支的提交至少需要经过一次 Review 才可以被合并 diff --git a/README.md b/README.md index 464d34dd4b6..9c549c1e15c 100644 --- a/README.md +++ b/README.md @@ -3,8 +3,8 @@

- Linkis builds a computation middleware layer to decouple the upper applications and the underlying data engines, - provides standardized interfaces (REST, JDBC, WebSocket etc.) to easily connect to various underlying engines (Spark, Presto, Flink, etc.), + Linkis builds a computation middleware layer to decouple the upper applications and the underlying data engines, + provides standardized interfaces (REST, JDBC, WebSocket etc.) to easily connect to various underlying engines (Spark, Presto, Flink, etc.), while enabling cross-engine context sharing, unified job & engine governance and orchestration.

@@ -33,7 +33,7 @@ - + github forks @@ -64,7 +64,6 @@ As a computation middleware, Linkis provides powerful connectivity, reuse, orche Since the first release of Linkis in 2019, it has accumulated more than **700** trial companies and **1000+** sandbox trial users, which involving diverse industries, from finance, banking, tele-communication, to manufactory, internet companies and so on. Lots of companies have already used Linkis as a unified entrance for the underlying computation and storage engines of the big data platform. - ![linkis-intro-01](https://user-images.githubusercontent.com/7869972/148767375-aeb11b93-16ca-46d7-a30e-92fbefe2bd5e.png) ![linkis-intro-03](https://user-images.githubusercontent.com/7869972/148767380-c34f44b2-9320-4633-9ec8-662701f41d15.png) @@ -72,11 +71,11 @@ Since the first release of Linkis in 2019, it has accumulated more than **700** # Features - **Support for diverse underlying computation storage engines** - - Currently supported computation/storage engines: Spark、Hive、Flink、Python、Pipeline、Sqoop、openLooKeng、Presto、ElasticSearch、JDBC, Shell, etc - - Computation/storage engines to be supported: Trino (planned 1.3.1), SeaTunnel (planned 1.3.1), etc - - Supported scripting languages: SparkSQL、HiveQL、Python、Shell、Pyspark、R、Scala and JDBC, etc + - Currently supported computation/storage engines: Spark、Hive、Flink、Python、Pipeline、Sqoop、openLooKeng、Presto、ElasticSearch、JDBC, Shell, etc + - Computation/storage engines to be supported: Trino (planned 1.3.1), SeaTunnel (planned 1.3.1), etc + - Supported scripting languages: SparkSQL、HiveQL、Python、Shell、Pyspark、R、Scala and JDBC, etc -- **Powerful task/request governance capabilities** With services such as Orchestrator, Label Manager and customized Spring Cloud Gateway, Linkis is able to provide multi-level labels based, cross-cluster/cross-IDC fine-grained routing, load balance, multi-tenancy, traffic control, resource control, and orchestration strategies like dual-active, active-standby, etc +- **Powerful task/request governance capabilities** With services such as Orchestrator, Label Manager and customized Spring Cloud Gateway, Linkis is able to provide multi-level labels based, cross-cluster/cross-IDC fine-grained routing, load balance, multi-tenancy, traffic control, resource control, and orchestration strategies like dual-active, active-standby, etc - **Support full stack computation/storage engine** As a computation middleware, it will receive, execute and manage tasks and requests for various computation storage engines, including batch tasks, interactive query tasks, real-time streaming tasks and storage tasks @@ -107,19 +106,6 @@ Since the first release of Linkis in 2019, it has accumulated more than **700** |TiSpark|1.1|ongoing|-|TiSpark EngineConn, supports querying TiDB with SparkSQL| -# Ecosystem - -| Component | Description | Linkis 1.x(recommend 1.1.1) Compatible | -| --------------- | -------------------------------------------------------------------- | --------- | -| [**DataSphereStudio**](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/README.md) | DataSphere Studio (DSS for short) is WeDataSphere, a one-stop data application development management portal | **DSS 1.0.1[released][Linkis recommend 1.1.1]** | -| [**Scriptis**](https://github.com/WeBankFinTech/Scriptis) | Support online script writing such as SQL, Pyspark, HiveQL, etc., submit to [Linkis](https://github.com/apache/incubator-linkis) to perform data analysis web tools | **In DSS 1.0.1[released]** | -| 
[**Schedulis**](https://github.com/WeBankFinTech/Schedulis) | Workflow task scheduling system based on Azkaban secondary development, with financial-grade features such as high performance, high availability and multi-tenant resource isolation | **Schedulis0.6.2 [released]** | -| [**Qualitis**](https://github.com/WeBankFinTech/Qualitis) | Data quality verification tool, providing data verification capabilities such as data integrity and correctness |**Qualitis 0.9.1 [released]** | -| [**Streamis**](https://github.com/WeBankFinTech/Streamis) | Streaming application development management tool. It supports the release of Flink Jar and Flink SQL, and provides the development, debugging and production management capabilities of streaming applications, such as: start-stop, status monitoring, checkpoint, etc| **Streamis 0.1.0 [released][Linkis recommend 1.1.0]** | -| [**Exchangis**](https://github.com/WeBankFinTech/Exchangis) | A data exchange platform that supports data transmission between structured and unstructured heterogeneous data sources, the upcoming Exchangis1. 0, will be connected with DSS workflow | **Exchangis 1.0.0 [developing]**| -| [**Visualis**](https://github.com/WeBankFinTech/Visualis) | A data visualization BI tool based on the second development of Davinci, an open source project of CreditEase, provides users with financial-level data visualization capabilities in terms of data security | **Visualis 1.0.0[developing]**| -| [**Prophecis**](https://github.com/WeBankFinTech/Prophecis) | A one-stop machine learning platform that integrates multiple open source machine learning frameworks. Prophecis' MLFlow can be connected to DSS workflow through AppConn | **Prophecis 0.3.0 [released]** | - # Download Please go to the [Linkis Releases Page](https://linkis.apache.org/download/main) to download a compiled distribution or a source code package of Linkis. @@ -132,20 +118,29 @@ ```shell +Note: If you want to use `-Dlinkis.build.web=true` to build the linkis-web image, you need to compile linkis-web first. + ## compile backend ### Mac OS/Linux + # 1. When compiling for the first time, execute the following command first ./mvnw -N install + # 2.
make the linkis distribution package # - Option 1: make the linkis distribution package only ./mvnw clean install -Dmaven.javadoc.skip=true -Dmaven.test.skip=true + # - Option 2: make the linkis distribution package and docker image +# - Option 2.1: image without mysql jdbc jars ./mvnw clean install -Pdocker -Dmaven.javadoc.skip=true -Dmaven.test.skip=true +# - Option 2.2: image with mysql jdbc jars +./mvnw clean install -Pdocker -Dmaven.javadoc.skip=true -Dmaven.test.skip=true -Dlinkis.build.with.jdbc=true + # - Option 3: linkis distribution package and docker image (included web) ./mvnw clean install -Pdocker -Dmaven.javadoc.skip=true -Dmaven.test.skip=true -Dlinkis.build.web=true -# - Option 4: linkis distribution package and docker image (included web and ldh (hadoop all in one for test)) -./mvnw clean install -Pdocker -Dmaven.javadoc.skip=true -Dmaven.test.skip=true -Dlinkis.build.web=true -Dlinkis.build.ldh=true +# - Option 4: linkis distribution package and docker image (included web and ldh (hadoop all in one for test)) +./mvnw clean install -Pdocker -Dmaven.javadoc.skip=true -Dmaven.test.skip=true -Dlinkis.build.web=true -Dlinkis.build.ldh=true -Dlinkis.build.with.jdbc=true ### Windows mvnw.cmd -N install @@ -156,7 +151,19 @@ cd incubator-linkis/linkis-web npm install npm run build ``` - + +### Bundled with MySQL JDBC Driver +Due to MySQL licensing restrictions, the MySQL Java Database Connectivity (JDBC) driver is not bundled with the +officially released Linkis image by default. However, at the current stage, Linkis still relies on this library to work properly. +To solve this problem, we provide a script which can help you create a custom image with the MySQL JDBC driver from the official +Linkis image yourself; the image created by this tool will be tagged as `linkis:with-jdbc` by default. + +```shell +$> LINKIS_IMAGE=linkis:1.3.0 +$> ./linkis-dist/docker/scripts/make-linikis-image-with-mysql-jdbc.sh +``` + + Please refer to [Quick Deployment](https://linkis.apache.org/docs/latest/deployment/quick-deploy) to do the deployment. # Examples and Guidance @@ -178,10 +185,6 @@ Linkis services could be divided into three categories: computation governance s Below is the Linkis architecture diagram. You can find more detailed architecture docs in [Linkis-Doc/Architecture](https://linkis.apache.org/docs/latest/architecture/overview). ![architecture](https://user-images.githubusercontent.com/7869972/148767383-f87e84ba-5baa-4125-8b6e-d0aa4f7d3a66.png) -Based on Linkis the computation middleware, we've built a lot of applications and tools on top of it in the big data platform suite [WeDataSphere](https://github.com/WeBankFinTech/WeDataSphere). Below are the currently available open-source projects. More projects upcoming, please stay tuned. - -![wedatasphere_stack_Linkis](https://user-images.githubusercontent.com/7869972/148767389-049361df-3609-4c2f-a4e2-c904c273300e.png) - # Contributing Contributions are always welcomed, we need more contributors to build Linkis together. either code, or doc, or other supports that could help the community. @@ -197,7 +200,6 @@ For code and documentation contributions, please follow the [contribution guide] ![wechatgroup](https://linkis.apache.org/Images/wedatasphere_contact_01.png) - # Who is Using Linkis We opened an issue [[Who is Using Linkis]](https://github.com/apache/incubator-linkis/issues/23) for users to feedback and record who is using Linkis.
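A quick way to sanity-check the custom image described in the `Bundled with MySQL JDBC Driver` section above, as a minimal sketch: the build command and the `linkis:with-jdbc` tag come from the README text itself, while the verification commands, the presence of `bash` in the image, and the driver jar name pattern are assumptions.

```shell
# Build the custom image from the official one, as documented above
LINKIS_IMAGE=linkis:1.3.0 ./linkis-dist/docker/scripts/make-linikis-image-with-mysql-jdbc.sh

# Assumed verification: the default tag should now exist locally
docker images linkis:with-jdbc

# Assumed verification: the driver jar should be present inside the image
docker run --rm linkis:with-jdbc bash -c 'find / -name "mysql-connector*.jar" 2>/dev/null'
```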
diff --git a/README_CN.md b/README_CN.md index 566d21238b0..df5405700d3 100644 --- a/README_CN.md +++ b/README_CN.md @@ -30,9 +30,9 @@ - + - + github forks @@ -53,7 +53,7 @@
--- -[English](README.md) | [中文 ](README_CN.md) +[English](README.md) | [中文](README_CN.md) # 介绍 @@ -66,14 +66,14 @@ Linkis 自 2019 年开源发布以来,已累计积累了 700 多家试验企 ![没有 Linkis 之前 ](https://user-images.githubusercontent.com/7869972/148767370-06025750-090e-4fd6-bd32-aab2fbb01352.png) - ![有了 Linkis 之后 ](https://user-images.githubusercontent.com/7869972/148767358-b02ae982-4080-4efa-aa0f-768ca27902b7.png) ## 核心特点 + - **丰富的底层计算存储引擎支持** - - **目前支持的计算存储引擎** Spark、Hive、Flink、Python、Pipeline、Sqoop、openLooKeng、Presto、ElasticSearch、JDBC 和 Shell 等 - - **正在支持中的计算存储引擎** Trino(计划 1.3.1)、SeaTunnel(计划 1.3.1) 等 - - **支持的脚本语言** SparkSQL、HiveQL、Python、Shell、Pyspark、R、Scala 和 JDBC 等 + - **目前支持的计算存储引擎** Spark、Hive、Flink、Python、Pipeline、Sqoop、openLooKeng、Presto、ElasticSearch、JDBC 和 Shell 等 + - **正在支持中的计算存储引擎** Trino(计划 1.3.1)、SeaTunnel(计划 1.3.1) 等 + - **支持的脚本语言** SparkSQL、HiveQL、Python、Shell、Pyspark、R、Scala 和 JDBC 等 - **强大的计算治理能力** 基于 Orchestrator、Label Manager 和定制的 Spring Cloud Gateway 等服务,Linkis 能够提供基于多级标签的跨集群/跨 IDC 细粒度路由、负载均衡、多租户、流量控制、资源控制和编排策略 (如双活、主备等) 支持能力 - **全栈计算存储引擎架构支持** 能够接收、执行和管理针对各种计算存储引擎的任务和请求,包括离线批量任务、交互式查询任务、实时流式任务和存储型任务 - **资源管理能力** ResourceManager 不仅具备对 Yarn 和 Linkis EngineManager 的资源管理能力,还将提供基于标签的多级资源分配和回收能力,让 ResourceManager 具备跨集群、跨计算资源类型的强大资源管理能力 @@ -102,20 +102,6 @@ Linkis 自 2019 年开源发布以来,已累计积累了 700 多家试验企 |Hadoop|Apache >=2.6.0,
CDH >=5.4.0|ongoing|-|Hadoop EngineConn, 支持 Hadoop MR/YARN application| |TiSpark|1.1|ongoing|-|TiSpark EngineConn, 支持用 SparkSQL 查询 TiDB| - -# 生态组件 - -| 应用工具 | 描述 | Linkis 1.X(推荐 1.1.1) 兼容版本 | -| --------------- | -------------------------------------------------------------------- | ---------- | -| [**DataSphere Studio**](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/README-ZH.md) | DataSphere Studio(简称 DSS)数据应用开发管理集成框架 | **DSS 1.0.1[已发布 ][Linkis 推荐 1.1.1]** | -| [**Scriptis**](https://github.com/WeBankFinTech/Scriptis) | 支持在线写 SQL、Pyspark、HiveQL 等脚本,提交给[Linkis](https://github.com/apache/incubator-linkis) 执行的数据分析 Web 工具 | 在 DSS 1.0.1 中[已发布 ] | -| [**Schedulis**](https://github.com/WeBankFinTech/Schedulis) | 基于 Azkaban 二次开发的工作流任务调度系统,具备高性能,高可用和多租户资源隔离等金融级特性 | **Schedulis0.6.2 [已发布 ]** | -| [**Qualitis**](https://github.com/WeBankFinTech/Qualitis) | 数据质量校验工具,提供数据完整性、正确性等数据校验能力 | **Qualitis 0.9.0 [已发布 ]** | -| [**Streamis**](https://github.com/WeBankFinTech/Streamis) | 流式应用开发管理工具。支持发布 Flink Jar 和 Flink SQL ,提供流式应用的开发调试和生产管理能力,如:启停、状态监控、checkpoint 等 | **Streamis 0.1.0 [已发布 ][Linkis 推荐 1.1.0]** | -| [**Exchangis**](https://github.com/WeBankFinTech/Exchangis) | 支持对结构化及无结构化的异构数据源之间的数据传输的数据交换平台,即将发布的 Exchangis1.0,将与 DSS 工作流打通 | **Exchangis 1.0.0 [开发中 ]** | -| [**Visualis**](https://github.com/WeBankFinTech/Visualis) | 基于宜信开源项目 Davinci 二次开发的数据可视化 BI 工具,为用户在数据安全方面提供金融级数据可视化能力 | **Visualis 1.0.0[开发中 ]** | -| [**Prophecis**](https://github.com/WeBankFinTech/Prophecis) | 一站式机器学习平台,集成多种开源机器学习框架。Prophecis 的 MLFlow 通过 AppConn 可以接入到 DSS 工作流中 | **Prophecis 0.3.0 [已发布 ]** | - # 下载 请前往[Linkis Releases 页面 ](https://linkis.apache.org/download/main) 下载 Linkis 的已编译版本或源码包。 @@ -126,23 +112,49 @@ Linkis 自 2019 年开源发布以来,已累计积累了 700 多家试验企 >- [后端编译打包 ](https://linkis.apache.org/zh-CN/docs/latest/development/linkis-compile-and-package) >- [管理台编译 ](https://linkis.apache.org/zh-CN/docs/latest/development/web-build) + + ```shell script ## 后端编译 +注意:如果使用 -Dlinkis.build.web=true 来构建web镜像,需要首先执行 管理台 linkis-web 的编译 ### Mac OS/Linux 系统 +# 1. 首次编译,请执行如下命令 ./mvnw -N install -./mvnw clean install -Dmaven.javadoc.skip=true -Dmaven.test.skip=true +# 2. 
构建 Linkis 部署包 +# - 选项 1: 仅构建 Linkis 服务部署包 +./mvnw clean install -Dmaven.javadoc.skip=true -Dmaven.test.skip=true +# - 选项 2: 构建 Linkis 服务部署包和 Docker 镜像 +# - 选项 2.1: 构建的镜像中不包含 mysql jdbc 包 +./mvnw clean install -Pdocker -Dmaven.javadoc.skip=true -Dmaven.test.skip=true +# - 选项 2.2: 构建的镜像中包含 mysql jdbc 包 +./mvnw clean install -Pdocker -Dmaven.javadoc.skip=true -Dmaven.test.skip=true -Dlinkis.build.with.jdbc=true +# - 选项 3: 构建 Linkis 服务部署包和 Docker 镜像(包括 Web 控制台) +./mvnw clean install -Pdocker -Dmaven.javadoc.skip=true -Dmaven.test.skip=true -Dlinkis.build.web=true +# - 选项 4: 构建 Linkis 服务部署包和 Docker 镜像(包括 Web 控制台和 LDH 镜像,LDH 镜像包含了多个预先配置和部署好的 Hadoop 组件,主要面向测试用途) +./mvnw clean install -Pdocker -Dmaven.javadoc.skip=true -Dmaven.test.skip=true -Dlinkis.build.web=true -Dlinkis.build.ldh=true -Dlinkis.build.with.jdbc=true ### Windows 系统 mvnw.cmd -N install mvnw.cmd clean install -Dmaven.javadoc.skip=true -Dmaven.test.skip=true + ## 管理台编译 cd incubator-linkis/linkis-web npm install npm run build ``` +### 集成 MySQL JDBC 驱动 + +由于 MySQL 的许可协议限制,官方发布的 Linkis 镜像没有集成 MySQL JDBC 驱动。 然而,在现阶段,Linkis 仍然依赖这个库才能正常运行。 为了解决这个问题, +我们提供了一个脚本,它可以帮助你快速地基于官方的 Linkis 镜像创建一个集成了 MySQL JDBC 的自定义镜像。 这个工具创建的镜像默认的名称是 `linkis:with-jdbc`。 + +```shell +$> LINKIS_IMAGE=linkis:1.3.0 +$> ./linkis-dist/docker/scripts/make-linikis-image-with-mysql-jdbc.sh +``` + 请参考[快速安装部署 ](https://linkis.apache.org/zh-CN/docs/latest/deployment/quick-deploy) 来部署 Linkis # 示例和使用指引 @@ -157,9 +169,9 @@ - Meetup 视频 [Bilibili](https://space.bilibili.com/598542776?from=search&seid=14344213924133040656) - # 架构概要 Linkis 基于微服务架构开发,其服务可以分为 3 类:计算治理服务、公共增强服务和微服务治理服务。 + - 计算治理服务,支持计算任务/请求处理流程的 3 个主要阶段:提交-> 准备-> 执行 - 公共增强服务,包括上下文服务、物料管理服务及数据源服务等 - 微服务治理服务,包括定制化的 Spring Cloud Gateway、Eureka、Open Feign 下面是 Linkis 的架构概要图. 更多详细架构文档请见 [Linkis-Doc/Architecture](https://linkis.apache.org/zh-CN/docs/latest/architecture/overview). ![architecture](https://user-images.githubusercontent.com/7869972/148767383-f87e84ba-5baa-4125-8b6e-d0aa4f7d3a66.png) -基于 Linkis 计算中间件,我们在大数据平台套件[WeDataSphere](https://github.com/WeBankFinTech/WeDataSphere) 中构建了许多应用和工具系统。下面是目前可用的开源项目。 - -![wedatasphere_stack_Linkis](https://user-images.githubusercontent.com/7869972/148767389-049361df-3609-4c2f-a4e2-c904c273300e.png) - -- [**DataSphere Studio** - 数据应用集成开发框架 ](https://github.com/WeBankFinTech/DataSphereStudio) - -- [**Scriptis** - 数据研发 IDE 工具 ](https://github.com/WeBankFinTech/Scriptis) - -- [**Visualis** - 数据可视化工具 ](https://github.com/WeBankFinTech/Visualis) - -- [**Schedulis** - 工作流调度工具 ](https://github.com/WeBankFinTech/Schedulis) - -- [**Qualitis** - 数据质量工具 ](https://github.com/WeBankFinTech/Qualitis) - -- [**MLLabis** - 容器化机器学习 notebook 开发环境 ](https://github.com/WeBankFinTech/prophecis) - -更多项目开源准备中,敬请期待。 # 贡献 我们非常欢迎和期待更多的贡献者参与共建 Linkis, 不论是代码、文档,或是其他能够帮助到社区的贡献形式。 -代码和文档相关的贡献请参照[贡献指引 ](https://linkis.apache.org/zh-CN/community/how-to-contribute). +代码和文档相关的贡献请参照[贡献指引](https://linkis.apache.org/zh-CN/community/how-to-contribute). # 联系我们 - 对 Linkis 的任何问题和建议,可以提交 issue,以便跟踪处理和经验沉淀共享 -- 通过邮件方式 [dev@linkis.apache.org](mailto:dev@linkis.apache.org) +- 通过邮件方式 [dev@linkis.apache.org](mailto:dev@linkis.apache.org) - 可以扫描下面的二维码,加入我们的微信群,以获得更快速的响应 ![wechatgroup](https://linkis.apache.org/Images/wedatasphere_contact_01.png) - - # 谁在使用 Linkis 我们创建了一个 issue [[Who is Using Linkis]](https://github.com/apache/incubator-linkis/issues/23) 以便用户反馈和记录谁在使用 Linkis.
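The `docs/configuration-change-records.md` file added below records parameter renames such as `wds.linkis.server.mdm.module.name` becoming `linkis.server.mdq.module.name`. A minimal upgrade-time check for leftover old keys, as a sketch (the `$LINKIS_HOME/conf` location is an assumption based on a standard Linkis deployment layout):

```shell
# Flag any configuration files that still reference the renamed keys
grep -rn \
  -e 'wds.linkis.server.mdm.module.name' \
  -e 'wds.linkis.server.dsm.module.name' \
  "$LINKIS_HOME/conf/"
```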
diff --git a/docs/configuration-change-records.md b/docs/configuration-change-records.md new file mode 100644 index 00000000000..908c62afc48 --- /dev/null +++ b/docs/configuration-change-records.md @@ -0,0 +1,7 @@ +## configuration change + + +| Module Name (Service Name) |Type| Parameter Name | Default Value | Description | +| ----------- | ----- | ------------------------------- | ---------------- | -------------------- | +|ps-metadataquery | Modify | wds.linkis.server.mdm.module.name -> linkis.server.mdq.module.name | metadataquery|metadata query service name| +|ps-metadataquery | Modify | wds.linkis.server.dsm.module.name ->
linkis.server.dsm.module.name | data-source-manager|Data Source Management Service Name | \ No newline at end of file diff --git a/docs/configuration/accessible-executor.md b/docs/configuration/accessible-executor.md index fea3432cc28..8ba1bbfbca9 100644 --- a/docs/configuration/accessible-executor.md +++ b/docs/configuration/accessible-executor.md @@ -1,7 +1,7 @@ -## accessible-executor 配置 +## accessible-executor configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |accessible-executor|wds.linkis.engineconn.log.cache.default|500|cache.default| |accessible-executor|wds.linkis.engineconn.ignore.words|org.apache.spark.deploy.yarn.Client |ignore.words| diff --git a/docs/configuration/elasticsearch.md b/docs/configuration/elasticsearch.md index f6e4cf73820..2104c0b0ed9 100644 --- a/docs/configuration/elasticsearch.md +++ b/docs/configuration/elasticsearch.md @@ -1,7 +1,7 @@ -## elasticsearch 配置 +## elasticsearch configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |elasticsearch|linkis.es.cluster|127.0.0.1:9200|cluster| |elasticsearch|linkis.datasource|default_datasource |linkis.datasource| diff --git a/docs/configuration/executor-core.md b/docs/configuration/executor-core.md index 9af0d2bb9f9..fb595fe2fda 100644 --- a/docs/configuration/executor-core.md +++ b/docs/configuration/executor-core.md @@ -1,7 +1,7 @@ -## executor-core 配置 +## executor-core configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |executor-core|wds.linkis.dataworkclod.engine.tmp.path|file:///tmp/|engine.tmp.path| |executor-core|wds.linkis.engine.application.name | |application.name| diff --git a/docs/configuration/flink.md b/docs/configuration/flink.md index 92bd216501d..ff1e5e97cda 100644 --- a/docs/configuration/flink.md +++ b/docs/configuration/flink.md @@ -1,7 +1,6 @@ -## flink 配置 +## flink configure - -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |flink|flink.client.memory |1024|client.memory | |flink|flink.taskmanager.memory|4096 |taskmanager.memory| diff --git a/docs/configuration/info-1.2.1.md b/docs/configuration/info-1.2.1.md deleted file mode 100644 index e2da12446bf..00000000000 --- a/docs/configuration/info-1.2.1.md +++ /dev/null @@ -1,29 +0,0 @@ -## 参数变化 - - -| 模块名(服务名)| 类型 | 参数名 | 默认值 | 描述 | -| ----------- | ----- | -------------------------------------------------------- | ---------------- | ------------------------------------------------------- | -|ps-metadataquery | 新增 | wds.linkis.server.mdq.mysql.relationship | oracle,kingbase,postgresql,sqlserver,db2,greenplum,dm,mysql | 在mysql元数据服务兼容oracle,kingbase,postgresql,sqlserver,db2,greenplum,dm,驱动外部引入 | -|cg-engineplugin | 新增 | wds.linkis.trino.default.limit | 5000 | Trino查询的结果集返回条数限制 | -|cg-engineplugin | 新增 | wds.linkis.trino.http.connectTimeout | 60 | 连接Trino服务器的超时时间 | -|cg-engineplugin | 新增 | wds.linkis.trino.http.readTimeout | 60 | 等待Trino服务器返回数据的超时时间 | -|cg-engineplugin | 新增 | wds.linkis.trino.resultSet.cache.max | 512k | Trino结果集缓冲区大小 | -|cg-engineplugin | 新增 | wds.linkis.trino.url | http://127.0.0.1:8080 | Trino服务器URL | -|cg-engineplugin | 新增 | 
wds.linkis.trino.user | null | 用于连接Trino查询服务的用户名 | -|cg-engineplugin | 新增 | wds.linkis.trino.password | null | 用于连接Trino查询服务的密码 | -|cg-engineplugin | 新增 | wds.linkis.trino.passwordCmd | null | 用于连接Trino查询服务的密码回调命令 | -|cg-engineplugin | 新增 | wds.linkis.trino.catalog | system | 连接Trino查询时使用的catalog | -|cg-engineplugin | 新增 | wds.linkis.trino.schema | | 连接Trino查询服务的默认schema | -|cg-engineplugin | 新增 | wds.linkis.trino.ssl.insecured | false | 是否忽略服务器的SSL证书 | -|cg-engineplugin | 新增 | wds.linkis.trino.ssl.keystore | null | keystore路径 | -|cg-engineplugin | 新增 | wds.linkis.trino.ssl.keystore.type | null | keystore类型 | -|cg-engineplugin | 新增 | wds.linkis.trino.ssl.keystore.password | null | keystore密码 | -|cg-engineplugin | 新增 | wds.linkis.trino.ssl.truststore | null | truststore路径 | -|cg-engineplugin | 新增 | wds.linkis.trino.ssl.truststore.type | null | truststore类型 | -|cg-engineplugin | 新增 | wds.linkis.trino.ssl.truststore.password | null | truststore密码 | - -## 特性说明 -| 模块名(服务名)| 类型 | 特性 | -| ----------- | ---------------- | ------------------------------------------------------- | -|linkis-metadata-query-service-mysql | 新增 | 基于mysql 模块融合dm,greenplum,kingbase,oracle,postgres,sqlserver ,协议和sql 区分开,metadata-query 反射多个数据源,基于mysql模块扩展,融合为一个模块。| -|linkis-engineconn-plugins-trino | 新增 | 基于trino-client实现的Trino查询引擎。| diff --git a/docs/configuration/jdbc.md b/docs/configuration/jdbc.md index 4caea6c88fa..24418f436cd 100644 --- a/docs/configuration/jdbc.md +++ b/docs/configuration/jdbc.md @@ -2,9 +2,8 @@ | 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| | -------- | -------- | ----- |----- | ----- | -|jdbc|wds.linkis.resultSet.cache.max| 0k |cache.max| -|jdbc|wds.linkis.jdbc.default.limit |5000 |jdbc.default.limit| +|jdbc|wds.linkis.resultSet.cache.max| 0k |cache.max| +|jdbc|wds.linkis.jdbc.default.limit |5000 |jdbc.default.limit| |jdbc|wds.linkis.jdbc.query.timeout|1800|jdbc.query.timeout| |jdbc|wds.linkis.engineconn.jdbc.concurrent.limit|100| jdbc.concurrent.limit | -|jdbc|wds.linkis.keytab.enable|false|eytab.enable| - \ No newline at end of file +|jdbc|wds.linkis.keytab.enable|false|eytab.enable| diff --git a/docs/configuration/linkis-application-manager.md b/docs/configuration/linkis-application-manager.md index fe78e026f47..543623f5949 100644 --- a/docs/configuration/linkis-application-manager.md +++ b/docs/configuration/linkis-application-manager.md @@ -1,7 +1,7 @@ -## linkis-application-manager 配置 +## linkis-application-manager configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |linkis-application-manager|wds.linkis.label.node.long.lived.label.keys |tenant|lived.label.keys| |linkis-application-manager|wds.linkis.governance.admin.operations| |admin.operations| diff --git a/docs/configuration/linkis-bml-server.md b/docs/configuration/linkis-bml-server.md index 9a28b9b764c..473f648b49d 100644 --- a/docs/configuration/linkis-bml-server.md +++ b/docs/configuration/linkis-bml-server.md @@ -1,7 +1,6 @@ -## linkis-bml-server 配置 +## linkis-bml-server configure - -| 模块名(服务名) | 参数名 | 默认值 | 描述 | +| Module Name (Service Name) | Parameter Name | Default Value | Description | | -------- | -------- | ----- |----- | |linkis-bml-server|wds.linkis.bml.hdfs.prefix| /apps-data | bml.hdfs.prefix| |linkis-bml-server|wds.linkis.bml.local.prefix|true|bml.local.prefix| diff --git a/docs/configuration/linkis-common.md b/docs/configuration/linkis-common.md index af4b708adcd..fd3f0f19837 100644 --- 
a/docs/configuration/linkis-common.md +++ b/docs/configuration/linkis-common.md @@ -1,6 +1,6 @@ -## linkis-common 配置项 +## linkis-common configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |linkis-common|wds.linkis.encoding|utf-8|字符类型|true| |linkis-common|wds.linkis.date.pattern |yyyy-MM-dd'T'HH:mm:ssZ |时间格式|true| diff --git a/docs/configuration/linkis-computation-client.md b/docs/configuration/linkis-computation-client.md index 49d1edbe42d..bf71677101e 100644 --- a/docs/configuration/linkis-computation-client.md +++ b/docs/configuration/linkis-computation-client.md @@ -1,7 +1,5 @@ ## linkis-computation-client 配置 - | 模块名(服务名) | 参数名 | 默认值 | 描述 | -| -------- | -------- | ----- |----- | +| -------- | -------- | ----- |----- | | linkis-computation-client |linkis.client.operator.once.log.enable|true|once.log.enable| - diff --git a/docs/configuration/linkis-computation-engineconn.md b/docs/configuration/linkis-computation-engineconn.md index 8ed71a815e7..379362364f9 100644 --- a/docs/configuration/linkis-computation-engineconn.md +++ b/docs/configuration/linkis-computation-engineconn.md @@ -1,7 +1,7 @@ -## linkis-computation-engineconn 配置 +## linkis-computation-engineconn configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |linkis-computation-engineconn|wds.linkis.engine.resultSet.cache.max |0k|engine.resultSet.cache.max| |linkis-computation-engineconn|wds.linkis.engine.lock.expire.time|2 * 60 * 1000 |lock.expire.time| diff --git a/docs/configuration/linkis-computation-governance-common.md b/docs/configuration/linkis-computation-governance-common.md index 2687b60a383..f4b0a858872 100644 --- a/docs/configuration/linkis-computation-governance-common.md +++ b/docs/configuration/linkis-computation-governance-common.md @@ -1,7 +1,7 @@ -## linkis-computation-governance-common 配置 +## linkis-computation-governance-common configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | +| Module Name (Service Name) | Parameter Name | Default Value | Description | | -------- | -------- | ----- |----- | |linkis-computation-governance-common|wds.linkis.rm| | wds.linkis.rm | |linkis-computation-governance-common|wds.linkis.spark.engine.version|2.4.3 |spark.engine.version| diff --git a/docs/configuration/linkis-computation-orchestrator.md b/docs/configuration/linkis-computation-orchestrator.md index d46ada1152b..ab7abe5a843 100644 --- a/docs/configuration/linkis-computation-orchestrator.md +++ b/docs/configuration/linkis-computation-orchestrator.md @@ -1,7 +1,7 @@ -## linkis-computation-orchestrator 配置 +## linkis-computation-orchestrator configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |linkis-computation-orchestrator|wds.linkis.computation.orchestrator.create.service |dss|orchestrator.create.service| |linkis-computation-orchestrator|wds.linkis.computation.orchestrator.mark.max.engine|3 |orchestrator.mark.max.engine| diff --git a/docs/configuration/linkis-configuration.md b/docs/configuration/linkis-configuration.md index eecca317971..b0aa967942f 100644 --- a/docs/configuration/linkis-configuration.md +++ b/docs/configuration/linkis-configuration.md @@ -1,7 +1,7 @@ -## linkis-configuration 配置 +## linkis-configuration configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | +| 
Module Name (Service Name) | Parameter Name | Default Value | Description | | -------- | -------- | ----- |----- | | linkis-configuration |wds.linkis.configuration.engine.type| |configuration.engine.type| | linkis-configuration |wds.linkis.engineconn.manager.name|linkis-cg-linkismanager |engineconn.manager.name| diff --git a/docs/configuration/linkis-engineconn-common.md b/docs/configuration/linkis-engineconn-common.md index ca45af2ce40..5484165b5bf 100644 --- a/docs/configuration/linkis-engineconn-common.md +++ b/docs/configuration/linkis-engineconn-common.md @@ -1,7 +1,7 @@ -## linkis-engineconn-common 配置 +## linkis-engineconn-common configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |linkis-engineconn-common|wds.linkis.engine.connector.executions|org.apache.linkis.engineconn.computation.executor.execute.ComputationEngineConnExecution|connector.executions| |linkis-engineconn-common|wds.linkis.engine.connector.hooks |org.apache.linkis.engineconn.computation.executor.hook.ComputationEngineConnHook |engine.connector.hooks| diff --git a/docs/configuration/linkis-engineconn-manager-core.md b/docs/configuration/linkis-engineconn-manager-core.md index 8830298744a..b5262579d2c 100644 --- a/docs/configuration/linkis-engineconn-manager-core.md +++ b/docs/configuration/linkis-engineconn-manager-core.md @@ -1,7 +1,5 @@ ## linkis-engineconn-manager-core 配置 - | 模块名(服务名) | 参数名 | 默认值 | 描述 | -| -------- | -------- | ----- |----- | +| -------- | -------- | ----- |----- | | linkis-engineconn-manager-core |linkis.ec.core.dump.disable|true|dump.disable| - diff --git a/docs/configuration/linkis-engineconn-manager-server.md b/docs/configuration/linkis-engineconn-manager-server.md index c3d34cbf31d..efd9e0794a4 100644 --- a/docs/configuration/linkis-engineconn-manager-server.md +++ b/docs/configuration/linkis-engineconn-manager-server.md @@ -1,7 +1,7 @@ -## linkis-engineconn-manager-server 配置 +## linkis-engineconn-manager-server configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |linkis-engineconn-manager-server|wds.linkis.ecm.async.bus.capacity |500|bus.capacity| |linkis-engineconn-manager-server|wds.linkis.ecm.async.bus.name|em_async_bus |async.bus.name| diff --git a/docs/configuration/linkis-engineconn-plugin-core.md b/docs/configuration/linkis-engineconn-plugin-core.md index 88b1146f3e5..542f5cc25d4 100644 --- a/docs/configuration/linkis-engineconn-plugin-core.md +++ b/docs/configuration/linkis-engineconn-plugin-core.md @@ -1,7 +1,7 @@ -## linkis-engineconn-plugin-core 配置 +## linkis-engineconn-plugin-core configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |linkis-engineconn-plugin-core|wds.linkis.engineConn.jars|engineConn额外的Jars|engineConn.jars| |linkis-engineconn-plugin-core|wds.linkis.engineConn.files |engineConn额外的配置文件 |engineConn.files | diff --git a/docs/configuration/linkis-engineconn-plugin-server.md b/docs/configuration/linkis-engineconn-plugin-server.md index 4c8f301cf14..fa16a87c537 100644 --- a/docs/configuration/linkis-engineconn-plugin-server.md +++ b/docs/configuration/linkis-engineconn-plugin-server.md @@ -1,7 +1,7 @@ -## linkis-engineconn-plugin-server 配置 +## linkis-engineconn-plugin-server configure -| 模块名(服务名) | 
参数名 | 默认值 | 描述 | +| Module Name (Service Name) | Parameter Name | Default Value | Description | | -------- | -------- | ----- |----- | | wds.linkis.engineconn.plugin.loader.classname | |plugin.loader.classname| | wds.linkis.engineconn.plugin.loader.defaultUser | hadoop |wds.linkis.engineconn.plugin.loader.defaultUser| @@ -12,16 +12,3 @@ | wds.linkis.engineconn.home | |engineconn.home| | wds.linkis.engineconn.dist.load.enable |true |dist.load.enable| | wds.linkis.engineconn.bml.upload.failed.enable |true |upload.failed.enable| - - - - - - - - - - - - - \ No newline at end of file diff --git a/docs/configuration/linkis-entrance.md b/docs/configuration/linkis-entrance.md index ec928f2221d..c1d64098682 100644 --- a/docs/configuration/linkis-entrance.md +++ b/docs/configuration/linkis-entrance.md @@ -1,7 +1,7 @@ -## linkis-entrance 配置 +## linkis-entrance configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |linkis-entrance|wds.linkis.entrance.scheduler.maxParallelismUsers |1000| scheduler.maxParallelismUsers| |linkis-entrance|wds.linkis.entrance.listenerBus.queue.capacity|5000 |queue.capacity| diff --git a/docs/configuration/linkis-gateway-core.md b/docs/configuration/linkis-gateway-core.md index 983bcc918a8..be933b2a26f 100644 --- a/docs/configuration/linkis-gateway-core.md +++ b/docs/configuration/linkis-gateway-core.md @@ -1,7 +1,7 @@ -## linkis-gateway-core 配置 +## linkis-gateway-core configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |linkis-gateway-core|wds.linkis.gateway.conf.enable.proxy.user |false|gateway.conf.enable.proxy.user| |linkis-gateway-core|wds.linkis.gateway.conf.proxy.user.config|proxy.properties|proxy.user.config| diff --git a/docs/configuration/linkis-gateway-httpclient-support.md b/docs/configuration/linkis-gateway-httpclient-support.md index 04e138331a0..88008e5331c 100644 --- a/docs/configuration/linkis-gateway-httpclient-support.md +++ b/docs/configuration/linkis-gateway-httpclient-support.md @@ -1,6 +1,5 @@ ## linkis-gateway-httpclient-support 配置 - | 模块名(服务名) | 参数名 | 默认值 | 描述 | -| -------- | -------- | ----- |----- | +| -------- | -------- | ----- |----- | | linkis-gateway-httpclient-support |linkis.gateway.enabled.defalut.discovery|true|gateway.enabled.defalut.discovery| diff --git a/docs/configuration/linkis-hadoop-common.md b/docs/configuration/linkis-hadoop-common.md index d30ea5aba28..d151ce10814 100644 --- a/docs/configuration/linkis-hadoop-common.md +++ b/docs/configuration/linkis-hadoop-common.md @@ -1,7 +1,7 @@ -## linkis-hadoop-common 配置 +## linkis-hadoop-common configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | -------- | -------- | ----- |----- | ----- | |linkis-hadoop-common|wds.linkis.hadoop.root.user|hadoop-8|hadoop.root.user|true| |linkis-hadoop-common|wds.linkis.keytab.enable |false |keytab.enable|true| diff --git a/docs/configuration/linkis-httpclient.md b/docs/configuration/linkis-httpclient.md index 695cb2e80b7..b8720181596 100644 --- a/docs/configuration/linkis-httpclient.md +++ b/docs/configuration/linkis-httpclient.md @@ -1,9 +1,6 @@ -## linkis-httpclient 配置 +## linkis-httpclient configure -| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用| +| Module Name (Service Name) | Parameter Name | Default Value | Description |Used| | 
-------- | -------- | ----- |----- | ----- |
 |linkis-httpclient|wds.linkis.httpclient.default.connect.timeOut| 50000 | httpclient.default.connect.timeOut |true|
-
-
-
diff --git a/docs/configuration/linkis-instance-label-client.md b/docs/configuration/linkis-instance-label-client.md
index 11a5213624c..b9938003e94 100644
--- a/docs/configuration/linkis-instance-label-client.md
+++ b/docs/configuration/linkis-instance-label-client.md
@@ -1,8 +1,5 @@
 ## linkis-instance-label-client 配置
-
 | 模块名(服务名) | 参数名 | 默认值 | 描述 |
-| -------- | -------- | ----- |----- |
+| -------- | -------- | ----- |----- |
 | linkis-instance-label-client |wds.linkis.instance.label.server.name|linkis-ps-publicservice|instance.label.server.name|
-
-
\ No newline at end of file
diff --git a/docs/configuration/linkis-io-file-client.md b/docs/configuration/linkis-io-file-client.md
index 07045e9f472..eb386b887fc 100644
--- a/docs/configuration/linkis-io-file-client.md
+++ b/docs/configuration/linkis-io-file-client.md
@@ -1,7 +1,7 @@
-## linkis-io-file-client 配置
+## linkis-io-file-client configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |linkis-io-file-client|wds.linkis.io.group.factory.capacity |1000|group.factory.capacity|
 |linkis-io-file-client|wds.linkis.io.group.factory.running.jobs|30 |group.factory.running.jobs|
@@ -9,9 +9,3 @@
 |linkis-io-file-client|wds.linkis.io.loadbalance.capacity| 1 |loadbalance.capacity|
 |linkis-io-file-client|wds.linkis.io.extra.labels| |extra.labels|
 |linkis-io-file-client|wds.linkis.io.job.wait.second| 30 | job.wait.second |
-
-
-
-
-
-
diff --git a/docs/configuration/linkis-jdbc-driver.md b/docs/configuration/linkis-jdbc-driver.md
index cf063f00eb1..656e4a76c1e 100644
--- a/docs/configuration/linkis-jdbc-driver.md
+++ b/docs/configuration/linkis-jdbc-driver.md
@@ -1,7 +1,5 @@
 ## linkis-jdbc-driver 配置
-
 | 模块名(服务名) | 参数名 | 默认值 | 描述 |
-| -------- | -------- | ----- |----- |
+| -------- | -------- | ----- |----- |
 | linkis-jdbc-driver |wds.linkis.jdbc.pre.hook.class|org.apache.linkis.ujes.jdbc.hook.impl.TableauPreExecutionHook|pre.hook.class|
-
diff --git a/docs/configuration/linkis-jobhistory.md b/docs/configuration/linkis-jobhistory.md
index 8046b185b99..34025ec54da 100644
--- a/docs/configuration/linkis-jobhistory.md
+++ b/docs/configuration/linkis-jobhistory.md
@@ -1,7 +1,7 @@
-## linkis-jobhistory 配置
+## linkis-jobhistory configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |linkis-jobhistory|wds.linkis.jobhistory.safe.trigger |true|jobhistory.safe.trigger|
 |linkis-jobhistory|wds.linkis.entrance.spring.name| linkis-cg-entrance |entrance.spring.name|
@@ -15,7 +15,3 @@
 |linkis-jobhistory|wds.linkis.env.is.viewfs| true|env.is.viewfs|
 |linkis-jobhistory|wds.linkis.query.store.suffix| |linkis.query.store.suffix|
 |linkis-jobhistory|wds.linkis.query.code.store.length|50000| query.code.store.length|
-
-
-
-
diff --git a/docs/configuration/linkis-manager-common.md b/docs/configuration/linkis-manager-common.md
index 776521ea98d..9ee1032d5c5 100644
--- a/docs/configuration/linkis-manager-common.md
+++ b/docs/configuration/linkis-manager-common.md
@@ -1,7 +1,7 @@
-## linkis-manager-common 配置
+## linkis-manager-common configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |linkis-manager-common|wds.linkis.default.engine.type |spark|engine.type|
 |linkis-manager-common|wds.linkis.manager.admin|2.4.3|manager.admin|
@@ -36,9 +36,3 @@
 |linkis-manager-common|wds.linkis.rm.default.yarn.cluster.type| Yarn|yarn.cluster.type|
 |linkis-manager-common|wds.linkis.rm.external.retry.num|3|external.retry.num|
 |linkis-manager-common|wds.linkis.rm.default.yarn.webaddress.delimiter| ; | yarn.webaddress.delimiter|
-
-
-
-
-
-
diff --git a/docs/configuration/linkis-metadata.md b/docs/configuration/linkis-metadata.md
index 612e56d207f..8bdd35f861e 100644
--- a/docs/configuration/linkis-metadata.md
+++ b/docs/configuration/linkis-metadata.md
@@ -1,7 +1,7 @@
-## linkis-metadata 配置
+## linkis-metadata configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 |
+| Module Name (Service Name) | Parameter Name | Default Value | Description |
 | -------- | -------- | ----- |----- |
 | linkis-metadata |bdp.dataworkcloud.datasource.store.type|orc|datasource.store.type|
 | linkis-metadata |bdp.dataworkcloud.datasource.default.par.name|ds|datasource.default.par.name|
diff --git a/docs/configuration/linkis-module.md b/docs/configuration/linkis-module.md
index 6e8fc96d9ca..09bcab1262a 100644
--- a/docs/configuration/linkis-module.md
+++ b/docs/configuration/linkis-module.md
@@ -1,7 +1,7 @@
-## linkis-module 配置
+## linkis-module configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |linkis-module|wds.linkis.server.component.exclude.packages| | exclude.packages |true|
 |linkis-module|wds.linkis.server.component.exclude.classes| |exclude.classes|true|
@@ -47,5 +47,3 @@
 |linkis-module|wds.linkis.session.proxy.user.ticket.key|linkis_user_session_proxy_ticket_id_v1 |ticket.key |true|
 |linkis-module|wds.linkis.proxy.ticket.header.crypt.key| linkis-trust-key |crypt.key |true|
 |linkis-module|wds.linkis.proxy.ticket.header.crypt.key| bfs_ | crypt.key|true|
-
-
diff --git a/docs/configuration/linkis-orchestrator-core.md b/docs/configuration/linkis-orchestrator-core.md
index fed8229a271..5520c8a00f0 100644
--- a/docs/configuration/linkis-orchestrator-core.md
+++ b/docs/configuration/linkis-orchestrator-core.md
@@ -1,7 +1,7 @@
-## linkis-orchestrator-core 配置
+## linkis-orchestrator-core configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |linkis-orchestrator-core|wds.linkis.orchestrator.builder.class | |orchestrator.builder.class|
 |linkis-orchestrator-core|wds.linkis.orchestrator.version|1.0.0|orchestrator.version|
@@ -24,5 +24,3 @@
 |linkis-orchestrator-core|wds.linkis.orchestrator.task.schedulis.creator| schedulis,nodeexecution|task.schedulis.creator|
 |linkis-orchestrator-core|wds.linkis.orchestrator.metric.log.enable|true|orchestrator.metric.log.enable|
 |linkis-orchestrator-core|wds.linkis.orchestrator.metric.log.time| 1h |orchestrator.metric.log.time|
-
-
diff --git a/docs/configuration/linkis-protocol.md b/docs/configuration/linkis-protocol.md
index 980fe061a9d..cd8534e64d4 100644
--- a/docs/configuration/linkis-protocol.md
+++ b/docs/configuration/linkis-protocol.md
@@ -1,8 +1,6 @@
-## linkis-protocol 配置
+## linkis-protocol configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |linkis-protocol|wds.linkis.service.suffix| engineManager,entrance,engine | service.suffix |true|
-
-
diff --git a/docs/configuration/linkis-rpc.md b/docs/configuration/linkis-rpc.md
index e5d74d2d09b..1acdeb681d1 100644
--- a/docs/configuration/linkis-rpc.md
+++ b/docs/configuration/linkis-rpc.md
@@ -1,7 +1,7 @@
-## linkis-rpc 配置
+## linkis-rpc configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |linkis-rpc|wds.linkis.rpc.broadcast.thread.num| 25 | thread.num |true|
 |linkis-rpc|wds.linkis.rpc.eureka.client.refresh.interval| 1s |refresh.interval|true|
@@ -23,6 +23,3 @@
 |linkis-rpc|wds.linkis.ms.service.scan.package|org.apache.linkis |scan.package|true|
 |linkis-rpc|wds.linkis.rpc.spring.params.enable| false | params.enable |true|
 |linkis-rpc|wds.linkis.rpc.cache.expire.time|120000L |expire.time|true|
-
-
-
diff --git a/docs/configuration/linkis-scheduler.md b/docs/configuration/linkis-scheduler.md
index c01ac8c7473..683ff5ecc22 100644
--- a/docs/configuration/linkis-scheduler.md
+++ b/docs/configuration/linkis-scheduler.md
@@ -1,10 +1,9 @@
-## linkis-scheduler 配置
+## linkis-scheduler configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |linkis-scheduler|wds.linkis.fifo.consumer.auto.clear.enabled|true | auto.clear.enabled|true|
 |linkis-scheduler|wds.linkis.fifo.consumer.max.idle.time|1h |max.idle.time|true|
 |linkis-scheduler|wds.linkis.fifo.consumer.idle.scan.interval|2h |idle.scan.interval|true|
 |linkis-scheduler|wds.linkis.fifo.consumer.idle.scan.init.time|1s | idle.scan.init.time |true|
-
diff --git a/docs/configuration/linkis-spring-cloud-gateway.md b/docs/configuration/linkis-spring-cloud-gateway.md
index d6209b33aeb..37bb005f09e 100644
--- a/docs/configuration/linkis-spring-cloud-gateway.md
+++ b/docs/configuration/linkis-spring-cloud-gateway.md
@@ -1,7 +1,5 @@
 ## linkis-spring-cloud-gateway 配置
-
 | 模块名(服务名) | 参数名 | 默认值 | 描述 |
-| -------- | -------- | ----- |----- |
+| -------- | -------- | ----- |----- |
 | linkis-spring-cloud-gateway |wds.linkis.gateway.websocket.heartbeat|5s|gateway.websocket.heartbeat|
-
diff --git a/docs/configuration/linkis-storage.md b/docs/configuration/linkis-storage.md
index d773c0f6104..45b07afc1d2 100644
--- a/docs/configuration/linkis-storage.md
+++ b/docs/configuration/linkis-storage.md
@@ -1,7 +1,7 @@
-## linkis-storage 配置
+## linkis-storage configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |linkis-storage|wds.linkis.storage.proxy.user| | storage.proxy.user |true|
 |linkis-storage|wds.linkis.storage.root.user| hadoop |storage.root.user|true|
@@ -30,6 +30,3 @@
 |linkis-storage|wds.linkis.hdfs.rest.errs| |rest.errs|true|
 |linkis-storage|wds.linkis.resultset.row.max.str | 2m | max.str |true|
 |linkis-storage|wds.linkis.storage.file.type | dolphin,sql,scala,py,hql,python,out,log,text,sh,jdbc,ngql,psql,fql,tsql | file.type |true|
-
-
-
diff --git a/docs/configuration/linkis-udf.md b/docs/configuration/linkis-udf.md
index b21020e232f..76a9460cfae 100644
--- a/docs/configuration/linkis-udf.md
+++ b/docs/configuration/linkis-udf.md
@@ -1,11 +1,10 @@
-## linkis-udf 配置
+## linkis-udf configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |linkis-udf|wds.linkis.udf.hive.exec.path |/appcom/Install/DataWorkCloudInstall/linkis-linkis-Udf-0.0.3-SNAPSHOT/lib/hive-exec-1.2.1.jar|udf.hive.exec.path|
 |linkis-udf|wds.linkis.udf.tmp.path|/tmp/udf/|udf.tmp.path|
 |linkis-udf|wds.linkis.udf.share.path|/mnt/bdap/udf/|udf.share.path|
 |linkis-udf|wds.linkis.udf.share.proxy.user| hadoop|udf.share.proxy.user|
 |linkis-udf|wds.linkis.udf.service.name|linkis-ps-publicservice |udf.service.name|
-
\ No newline at end of file
diff --git a/docs/configuration/pipeline.md b/docs/configuration/pipeline.md
index 03f1f1cdc7b..d80b6dc946a 100644
--- a/docs/configuration/pipeline.md
+++ b/docs/configuration/pipeline.md
@@ -1,13 +1,10 @@
-## pipeline 配置
+## pipeline configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 |
+| Module Name (Service Name) | Parameter Name | Default Value | Description |
 | -------- | -------- | ----- |----- |
 |pipeline| pipeline.output.isoverwtite | true | isoverwtite |
 |pipeline|pipeline.output.charset|UTF-8|charset|
 |pipeline|pipeline.field.split| , |split|
 |pipeline|wds.linkis.engine.pipeline.field.quote.retoch.enable|false | field.quote.retoch.enable |
 |pipeline|wds.linkis.pipeline.export.excel.auto_format.enable| false | auto_format.enable
-
-
-
diff --git a/docs/configuration/presto.md b/docs/configuration/presto.md
index fca90759a3e..f376c94b8b5 100644
--- a/docs/configuration/presto.md
+++ b/docs/configuration/presto.md
@@ -1,7 +1,7 @@
-## presto 配置
+## presto configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |presto|wds.linkis.engineconn.concurrent.limit|100|engineconn.concurrent.limit|
 |presto|wds.linkis.entrance.max.job.instance|100|entrance.max.job.instance|
@@ -19,5 +19,3 @@
 |presto|wds.linkis.presto.source| |presto.source|
 |presto|wds.linkis.presto.source| global |presto.source|
 |presto|presto.session.query_max_total_memory|8GB|query_max_total_memory|
-
-
diff --git a/docs/configuration/python.md b/docs/configuration/python.md
index 5e3687377dc..318ce4774db 100644
--- a/docs/configuration/python.md
+++ b/docs/configuration/python.md
@@ -1,10 +1,9 @@
-## python 配置
+## python configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |python|wds.linkis.python.line.limit|10|python.line.limit|
 |python|wds.linkis.python.py4j.home |getPy4jHome |python.py4j.home|
 |python|wds.linkis.engine.python.language-repl.init.time|30s|python.language-repl.init.time|
 |python|pythonVersion|python3|pythonVersion|
-
diff --git a/docs/configuration/spark.md b/docs/configuration/spark.md
index 49906434644..6c8abfcb640 100644
--- a/docs/configuration/spark.md
+++ b/docs/configuration/spark.md
@@ -1,7 +1,7 @@
-## spark 配置
+## spark configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |spark|linkis.bgservice.store.prefix|hdfs:///tmp/bdp-ide/|bgservice.store.prefix|
 |spark|linkis.bgservice.store.suffix| |bgservice.store.suffix|
@@ -25,7 +25,3 @@
 |spark|wds.linkis.spark.engine.is.viewfs.env| true | spark.engine.is.viewfs.env|
 |spark|wds.linkis.spark.engineconn.fatal.log|error writing class;OutOfMemoryError|spark.engineconn.fatal.log|
 |spark|wds.linkis.spark.engine.scala.replace_package_header.enable| true |spark.engine.scala.replace_package_header.enable|
-
-
-
-
\ No newline at end of file
diff --git a/docs/configuration/sqoop.md b/docs/configuration/sqoop.md
index 85eaa87d911..2fb26da7ee5 100644
--- a/docs/configuration/sqoop.md
+++ b/docs/configuration/sqoop.md
@@ -1,7 +1,7 @@
-## sqoop 配置
+## sqoop configuration
-| 模块名(服务名) | 参数名 | 默认值 | 描述 | 是否引用|
+| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
 | -------- | -------- | ----- |----- | ----- |
 |sqoop|wds.linkis.hadoop.site.xml |core-site.xml;hdfs-site.xml;yarn-site.xml;mapred-site.xml| hadoop.site.xml|
 |sqoop|sqoop.fetch.status.interval|5s |status.interval|
diff --git a/docs/configuration/trino.md b/docs/configuration/trino.md
new file mode 100644
index 00000000000..21a33d5d4e8
--- /dev/null
+++ b/docs/configuration/trino.md
@@ -0,0 +1,22 @@
+## trino 配置
+
+| 模块名(服务名) | 参数名 | 默认值 |描述 |是否引用|
+| ------------ | ---------------------------------------| ----------------------|---------------------------------------- | ----- |
+| trino | linkis.trino.default.limit | 5000 | 查询的结果集返回条数限制 |
+| trino | linkis.trino.http.connectTimeout | 60 | 连接Trino服务器的超时时间(秒) |
+| trino | linkis.trino.http.readTimeout | 60 | 等待Trino服务器返回数据的超时时间(秒) |
+| trino | linkis.trino.resultSet.cache.max | 512k | Trino结果集缓冲区大小 |
+| trino | linkis.trino.url | http://127.0.0.1:8080 | Trino服务器URL |
+| trino | linkis.trino.user | null | 用于连接Trino查询服务的用户名 |
+| trino | linkis.trino.password | null | 用于连接Trino查询服务的密码 |
+| trino | linkis.trino.passwordCmd | null | 用于连接Trino查询服务的密码回调命令 |
+| trino | linkis.trino.catalog | system | 连接Trino查询时使用的catalog |
+| trino | linkis.trino.schema | | 连接Trino查询服务的默认schema |
+| trino | linkis.trino.ssl.insecured | false | 是否忽略服务器的SSL证书 |
+| trino | linkis.engineconn.concurrent.limit | 100 | 引擎最大并发 |
+| trino | linkis.trino.ssl.keystore | null | Trino服务器SSL keystore路径 |
+| trino | linkis.trino.ssl.keystore.type | null | Trino服务器SSL keystore类型 |
+| trino | linkis.trino.ssl.keystore.password | null | Trino服务器SSL keystore密码 |
+| trino | linkis.trino.ssl.truststore | null | Trino服务器SSL truststore路径 |
+| trino | linkis.trino.ssl.truststore.type | null | Trino服务器SSL truststore类型 |
+| trino | linkis.trino.ssl.truststore.password | null | Trino服务器SSL truststore密码 |
diff --git a/docs/errorcode/linkis-bml-client.md b/docs/errorcode/linkis-bml-client.md
new file mode 100644
index 00000000000..0210a64112a
--- /dev/null
+++ b/docs/errorcode/linkis-bml-client.md
@@ -0,0 +1,9 @@
+## linkis-bml-client errorcode
+
+
+| 模块名(服务名) | 错误码 | 描述 | module |
+| -------- |-------| ----- |---------|
+|linkis-bml-client| 20060 |the result returned by the repository client POST request does not match(物料库客户端POST请求返回的result不匹配)|bmlClient|
+|linkis-bml-client| 20061 |failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)|bmlClient|
+|linkis-bml-client| 20062 |serverUrl cannot be null(服务器URL不能为空)|bmlClient|
+
diff --git a/docs/errorcode/linkis-common-errorcode.md b/docs/errorcode/linkis-common-errorcode.md
index 041860fe1f2..989d57759cc 100644
--- a/docs/errorcode/linkis-common-errorcode.md
+++ b/docs/errorcode/linkis-common-errorcode.md
@@ -1,6 +1,5 @@
 ## linkis-common errorcode
-
 | 模块名(服务名) | 错误码 | 描述 | module|
 | -------- | -------- | ----- |-----|
 |linkis-common|11000|Engine failed to start(引擎启动失败)|hiveEngineConn|
@@ -10,9 +9,3 @@
 |linkis-common|20100|User is empty in the parameters of the request engine(请求引擎的参数中user为空)|EngineConnManager|
 |linkis-common|321|Failed to start under certain circumstances(在某种情况下启动失败)|EngineConnManager|
 |linkis-common|10000|Error code definition exceeds the maximum value or is less than the minimum value(错误码定义超过最大值或者小于最小值)|linkis-frame|
-
-
-
-
-
-
diff --git a/docs/errorcode/linkis-configuration -errorcode.md b/docs/errorcode/linkis-configuration -errorcode.md
new file mode 100644
index 00000000000..a829eea6487
--- /dev/null
+++ b/docs/errorcode/linkis-configuration -errorcode.md
@@ -0,0 +1,22 @@
+## linkis-configuration errorcode
+
+| 模块名(服务名) | 错误码 | 描述 | Exception Class|
+| -------- | -------- | ----- |-----|
+|linkis-configuration |14100|Failed to build label(建立标签失败)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|Failed to build label ,label is null(建立标签失败,标签为空)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|config key not exists:(配置键不存在:)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|label not exists:(标签不存在:)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|key or value cannot be null(键或值不能为空)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|params cannot be empty!(参数不能为空!)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|token is error(令牌是错误的)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|categoryName is null, cannot be added(categoryName 为空,无法添加)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|categoryName cannot be included '-'(类别名称不能包含 '-')|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|creator is null, cannot be added(创建者为空,无法添加)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|engine type is null, cannot be added(引擎类型为空,无法添加)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-2.4.3(保存的引擎类型参数有误,请按照固定格式传送,例如spark-2.4.3)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|Incomplete request parameters, please reconfirm(请求参数不完整,请重新确认)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|only admin can modify category(只有管理员才能修改目录)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100| The label parameter is empty(标签参数为空)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|error validator range!(错误验证器范围!)|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|this type of label is not supported:{}(不支持这种类型的标签:{})|LinkisConfigurationErrorCodeSummary|
+
diff --git a/docs/errorcode/linkis-cs-server-errorcode.md b/docs/errorcode/linkis-cs-server-errorcode.md
new file mode 100644
index 00000000000..d44b603ead1
--- /dev/null
+++ b/docs/errorcode/linkis-cs-server-errorcode.md
@@ -0,0 +1,5 @@
+## linkis-cs-server errorcode
+
+| 模块名(服务名) | 错误码 | 描述 | Exception Class|
+| -------- | -------- | ----- |-----|
+|linkis-cs-server|1200001|Unknown Condition Type(未知条件类型)|LinkisCsServerErrorCodeSummary|
diff --git a/docs/errorcode/linkis-datasource-manager-errorcode.md b/docs/errorcode/linkis-datasource-manager-errorcode.md
new file mode 100644
index 00000000000..6fe139c07e1
--- /dev/null
+++ b/docs/errorcode/linkis-datasource-manager-errorcode.md
@@ -0,0 +1,15 @@
+## linkis-datasource-manager errorcode
+
+| 模块名(服务名) | 错误码 | 描述 | Exception Class|
+| -------- | -------- | ----- |-----|
+|linkis-datasource-manager|16897|Unable to deserialize to object from string(json) in type: (序列化失败)|LinkisDatasourceManagerErrorCodeSummary|
+|linkis-datasource-manager|16898|cannot be serialized (无法序列化)|LinkisDatasourceManagerErrorCodeSummary|
+|linkis-datasource-manager|99983|Connection Failed(连接失败)|LinkisDatasourceManagerErrorCodeSummary|
+|linkis-datasource-manager|99983|Remote Service Error(远端服务出错, 联系运维处理)|LinkisDatasourceManagerErrorCodeSummary|
+|linkis-datasource-manager|99986|Param Validate Failed(参数校验出错)|LinkisDatasourceManagerErrorCodeSummary|
+|linkis-datasource-manager|99987|Fail to operate file in request(上传文件处理失败)|LinkisDatasourceManagerErrorCodeSummary|
+|linkis-datasource-manager|99988|datasource not found(未找到数据源)|LinkisDatasourceManagerErrorCodeSummary|
+
+
+
\ No newline at end of file
diff --git a/docs/errorcode/linkis-engineplugin-hive.md b/docs/errorcode/linkis-engineplugin-hive.md
new file mode 100644
index 00000000000..105e91cef57
--- /dev/null
+++ b/docs/errorcode/linkis-engineplugin-hive.md
@@ -0,0 +1,10 @@
+## linkis-engineplugin-hive errorcode
+
+
+| 模块名(服务名) | 错误码 | 描述 | module |
+|--------------------------|-------| ---- |--------------|
+| linkis-engineplugin-hive | 26040 |failed to create hive executor(创建hive执行器失败)| hiveEngine |
+| linkis-engineplugin-hive | 26041 |cannot find hive-exec.jar, start session failed(找不到 hive-exec.jar,启动会话失败)| hiveEngine |
+| linkis-engineplugin-hive | 26042 |cannot get the field schemas(无法获取字段模式)| hiveEngine |
+| linkis-engineplugin-hive | 26043 |invalid value(无效值)| hiveEngine |
+
diff --git a/docs/errorcode/linkis-engineplugin-openlookeng-errorcode.md b/docs/errorcode/linkis-engineplugin-openlookeng-errorcode.md
new file mode 100644
index 00000000000..8b2e6857e26
--- /dev/null
+++ b/docs/errorcode/linkis-engineplugin-openlookeng-errorcode.md
@@ -0,0 +1,12 @@
+## linkis-engineplugin-openlookeng errorcode
+
+
+| 模块名(服务名) | 错误码 | 描述 | module |
+| -------- |-------| ----- |-----------------------------|
+|linkis-engineplugin-openlookeng| 26030 |openlookeng client error(openlookeng客户端异常)|openlookengEngineConnExecutor|
+|linkis-engineplugin-openlookeng| 26031 |openlookeng status error,Statement is not finished(openlookeng状态异常, 查询语句未完成)|openlookengEngineConnExecutor|
+
+
+
+
+
diff --git a/docs/errorcode/linkis-entrance-errorcode.md b/docs/errorcode/linkis-entrance-errorcode.md
new file mode 100644
index 00000000000..581ad8e3c90
--- /dev/null
+++ b/docs/errorcode/linkis-entrance-errorcode.md
@@ -0,0 +1,34 @@
+## linkis-entrance errorcode
+
+| module name(模块名) | error code(错误码) | description(描述) |enumeration name(枚举)| Exception Class(类名)|
+| -------- | -------- | ----- |-----|-----|
+|linkis-entrance |10000|Unsupported operation(不支持的操作)|UNSUPPORTED_OPERATION|EntranceErrorCodeSummary|
+|linkis-entrance |20001|JobReq can't be null(JobReq不能为空)|JOBREQ_NOT_NULL|EntranceErrorCodeSummary|
+|linkis-entrance |20001|The label of userCreator or engineType cannot be null(标签 userCreator 或 engineType 不能为空)|LABEL_NOT_NULL|EntranceErrorCodeSummary|
+|linkis-entrance |20001|Task is not requestPersistTask, cannot to create Executor(Task不是requestPersistTask,不能创建Executor)|NOT_CREATE_EXECUTOR|EntranceErrorCodeSummary|
+|linkis-entrance |20001|Task is not EntranceJob, cannot to create Executor(Task 不是 EntranceJob,不能创建 Executor)|ENTRA_NOT_CREATE_EXECUTOR|EntranceErrorCodeSummary|
+|linkis-entrance |20004|JobRequest cannot be null, unable to do persist operation(JobRequest 不能为空,无法进行持久化操作)|JOBREQUEST_NOT_NULL|EntranceErrorCodeSummary|
+|linkis-entrance |20004|The instance can't be null(实例不能为空)|INSTANCE_NOT_NULL|EntranceErrorCodeSummary|
+|linkis-entrance |20005|The execute user can't be null(执行用户不能为空)|EXECUTEUSER_NOT_NULL|EntranceErrorCodeSummary|
+|linkis-entrance |20007|The param executionCode can not be empty (参数 executionCode 不能为空)|PARAM_NOT_NULL|EntranceErrorCodeSummary|
+|linkis-entrance |20007|The param executionCode and scriptPath can not be empty at the same time(参数 executionCode 和 scriptPath 不能同时为空)|EXEC_SCRIP_NOT_NULL|EntranceErrorCodeSummary|
+|linkis-entrance |20010|Only code with runtype supported (仅支持运行类型的代码)|ONLY_CODE_SUPPORTED|EntranceErrorCodeSummary|
+|linkis-entrance |20011|Request jobHistory failed, reason (请求jobHistory失败,原因):|REQUEST_JOBHISTORY_FAILED|EntranceErrorCodeSummary|
+|linkis-entrance |20011|Request jobHistory failed, reason: jobRespProtocol is null (请求jobHistory失败,原因:jobRespProtocol为null)|JOBRESP_PROTOCOL_NULL|EntranceErrorCodeSummary|
+|linkis-entrance |20011|The read all tasks failed, reason (读取所有任务失败,原因):|READ_TASKS_FAILED|EntranceErrorCodeSummary|
+|linkis-entrance |20020|Sender rpc failed(发件人 RPC 失败)|SENDER_RPC_FAILED|EntranceErrorCodeSummary|
+|linkis-entrance |20039|Failed to analysis task ! the reason is(分析任务失败!原因是):|FAILED_ANALYSIS_TASK|EntranceErrorCodeSummary|
+|linkis-entrance |20052|Invalid engineType null, cannot use cache(无效的engineType null,不能使用缓存)|INVALID_ENGINETYPE_NULL|EntranceErrorCodeSummary|
+|linkis-entrance |20052|Persist jobRequest error, please submit again later(存储Job异常,请稍后重新提交任务)|PERSIST_JOBREQUEST_ERROR|EntranceErrorCodeSummary|
+|linkis-entrance |20053|Invalid resultsets, cannot use cache(结果集无效,无法使用缓存)|INVALID_RESULTSETS|EntranceErrorCodeSummary|
+|linkis-entrance |30009|Submitting the query failed!(提交查询失败!)|SUBMITTING_QUERY_FAILED|EntranceErrorCodeSummary|
+|linkis-entrance |50081|Query from jobHistory status failed(从 jobHistory 状态查询失败)|QUERY_STATUS_FAILED|EntranceErrorCodeSummary|
+|linkis-entrance |50081|Get query response incorrectly(错误地获取查询响应)|GET_QUERY_RESPONSE|EntranceErrorCodeSummary|
+|linkis-entrance |50081|Query taskId error,taskId(查询 taskId 错误,taskId):|QUERY_TASKID_ERROR|EntranceErrorCodeSummary|
+|linkis-entrance |50081|Query from jobhistory with incorrect list type of taskId, the taskId is ( 从jobhistory 中查询的参数类型不正确,taskId为):|CORRECT_LIST_TYPR|EntranceErrorCodeSummary|
+|linkis-entrance |50081|Shell code contains blacklisted code(shell中包含黑名单代码)|SHELL_BLACKLISTED_CODE|EntranceErrorCodeSummary|
+|linkis-entrance |20301|The logPath cannot be empty(日志路径不能为空)|LOGPATH_NOT_NULL|EntranceErrorCodeSummary|
+
+
+
+
diff --git a/docs/errorcode/linkis-gateway-authentication-errorcode.md b/docs/errorcode/linkis-gateway-authentication-errorcode.md
new file mode 100644
index 00000000000..e468b65ed6a
--- /dev/null
+++ b/docs/errorcode/linkis-gateway-authentication-errorcode.md
@@ -0,0 +1,12 @@
+## linkis-gateway-authentication errorcode
+
+| 模块名(服务名) | 错误码 | 描述 | Exception Class|
+| -------- | -------- | ----- |-----|
+|linkis-gateway-authentication |15205|token is null!(令牌为空!)|LinkisGwAuthenticationErrorCodeSummary|
+|linkis-gateway-authentication |15200|Failed to load token from DB into cache!(无法将令牌从数据库加载到缓存中!)|LinkisGwAuthenticationErrorCodeSummary|
+|linkis-gateway-authentication |15201|Token is not valid or stale!(令牌无效或陈旧!)|LinkisGwAuthenticationErrorCodeSummary|
+|linkis-gateway-authentication |15202|Illegal TokenUser for Token!(代币非法用户!)|LinkisGwAuthenticationErrorCodeSummary|
+|linkis-gateway-authentication |15203|Illegal Host for Token!(Token非法主机!)|LinkisGwAuthenticationErrorCodeSummary|
+|linkis-gateway-authentication |15204|Invalid Token(令牌无效)|LinkisGwAuthenticationErrorCodeSummary|
+
+
diff --git a/docs/errorcode/linkis-gateway-httpclient-support_errorcode.md b/docs/errorcode/linkis-gateway-httpclient-support_errorcode.md
new file mode 100644
index 00000000000..54b02180e0c
--- /dev/null
+++ b/docs/errorcode/linkis-gateway-httpclient-support_errorcode.md
@@ -0,0 +1,13 @@
+## linkis-gateway-httpclient-support errorcode
+
+| 模块名(服务名) | 错误码 | 描述 | Exception Class|
+| -------- | -------- | ----- |-----|
+|linkis-gateway-httpclient-support|10901|the value of authTokenValue in ClientConfig must be exists, since no password is found to login(ClientConfig中authTokenValue的值必须存在,因为没有找到密码登录.)|LinkisGwHttpclientSupportErrorCodeSummary|
+|linkis-gateway-httpclient-support|10901|cannot use token authentication, since no user is found to proxy(无法使用令牌身份验证,因为找不到代理用户)|LinkisGwHttpclientSupportErrorCodeSummary|
+|linkis-gateway-httpclient-support|10901|ClientConfig must specify the DWS version(ClientConfig必须指定DWS版本)|LinkisGwHttpclientSupportErrorCodeSummary|
+
+
+
+
+
+
diff --git a/docs/errorcode/linkis-instance-label-errorcode.md b/docs/errorcode/linkis-instance-label-errorcode.md
new file mode 100644
index 00000000000..7f38e00a1e7
--- /dev/null
+++ b/docs/errorcode/linkis-instance-label-errorcode.md
@@ -0,0 +1,8 @@
+## linkis-instance-label errorcode
+
+| 模块名(服务名) | 错误码 | 描述 | Exception Class|
+| -------- | -------- | ----- |-----|
+|linkis-instance-label |14100|Failed to insert service instance(插入服务实例失败)|LinkisInstanceLabelErrorCodeSummary|
+|linkis-instance-label |14100|Only admin can view all instances(只有管理员才能查看所有实例).|LinkisInstanceLabelErrorCodeSummary|
+|linkis-instance-label |14100|Only admin can modify instance label(只有管理员才能修改标签).|LinkisInstanceLabelErrorCodeSummary|
+|linkis-instance-label |14100|Failed to update label, include repeat label(更新label失败,包含重复label)|LinkisInstanceLabelErrorCodeSummary|
diff --git a/docs/errorcode/linkis-metadata-errorcode.md b/docs/errorcode/linkis-metadata-errorcode.md
new file mode 100644
index 00000000000..fdf74c99f0a
--- /dev/null
+++ b/docs/errorcode/linkis-metadata-errorcode.md
@@ -0,0 +1,9 @@
+## linkis-metadata errorcode
+
+| 模块名(服务名) | 错误码 | 描述 | Exception Class|
+| -------- | -------- | ----- |-----|
+| linkis-metadata|57895| unrecognized import type(无法识别的导入类型)|LinkisMetadataErrorCodeSummary|
+| linkis-metadata|57895|import hive source is null(导入配置单元源为空)|LinkisMetadataErrorCodeSummary|
+| linkis-metadata|57895|Hive create table destination database or tablename is null(Hive 创建表目标数据库或表名为空)|LinkisMetadataErrorCodeSummary|
+| linkis-metadata|57895|hive create table source table name is null(hive 创建表源表名为空)|LinkisMetadataErrorCodeSummary|
+| linkis-metadata|57895|partition name or type is null(分区名称或类型为空)|LinkisMetadataErrorCodeSummary|
diff --git a/docs/errorcode/linkis-rpc-errorcode.md b/docs/errorcode/linkis-rpc-errorcode.md
new file mode 100644
index 00000000000..b9f98e664e0
--- /dev/null
+++ b/docs/errorcode/linkis-rpc-errorcode.md
@@ -0,0 +1,23 @@
+## linkis-rpc errorcode
+
+
+| 模块名(服务名) | 错误码 | 描述 | class|
+| -------- | -------- | ----- |-----|
+|linkis-rpc|10021|Failed to get user parameters! Reason: RPC request(获取用户参数失败!原因:RPC请求) #applicationName Service failed!(服务失败!)|RPCErrorConstants|
+|linkis-rpc|10054|Asyn RPC Consumer Thread has stopped!(Asyn RPC Consumer 线程已停止!)|RPCErrorConstants|
+|linkis-rpc|10000|The service does not exist for the available Receiver.(服务不存在可用的Receiver.) |EngineConnManager|
+|linkis-rpc|15555| Asyn RPC Consumer Queue is full, please retry after some times.(Asyn RPC Consumer Queue 已满,请稍后重试。)|DWCRPCRetryException|
+|linkis-rpc|10002|The timeout period is not set!(超时时间未设置!)|LinkisRpcErrorCodeSummary|
+|linkis-rpc|10003|The corresponding anti-sequence class $objectClass was not found:(找不到对应的反序列类:)|LinkisRpcErrorCodeSummary|
+|linkis-rpc|10004|The corresponding anti-sequence class failed to initialize:(对应的反序列类初始化失败:)|LinkisRpcErrorCodeSummary|
+|linkis-rpc|10001|The transmitted bean is Null.(传输的bean为Null.)|LinkisRpcErrorCodeSummary|
+|linkis-rpc|10051|The instance ## of application ## is not exists.(应用程序## 的实例## 不存在。)|LinkisRpcErrorCodeSummary|
+|linkis-rpc|10000|method call failed:(方法调用失败:)|LinkisRpcErrorCodeSummary|
+
+
+
+
+
+
+
+
diff --git a/docs/errorcode/linkis-storage -errorcode.md b/docs/errorcode/linkis-storage -errorcode.md
new file mode 100644
index 00000000000..787efb15041
--- /dev/null
+++ b/docs/errorcode/linkis-storage -errorcode.md
@@ -0,0 +1,26 @@
+## linkis-storage errorcode
+
+| 模块名(服务名) | 错误码 | 描述 | Exception Class|
+| -------- | -------- | ----- |-----|
+|linkis-storage |52000|FSNotInitException|LinkisStorageErrorCodeSummary|
+|linkis-storage |52004|You must register IOClient before you can use proxy mode.(必须先注册IOClient,才能使用代理模式)|LinkisStorageErrorCodeSummary|
+|linkis-storage |52004|You must register IOMethodInterceptorCreator before you can use proxy mode.(必须先注册IOMethodInterceptorCreator,才能使用代理模式)|LinkisStorageErrorCodeSummary|
+|linkis-storage |50000|Unsupported result type(不支持的结果类型):{}|LinkisStorageErrorCodeSummary|
+|linkis-storage |51000|The file{}is empty(文件{}为空)|LinkisStorageErrorCodeSummary|
+|linkis-storage |52002|Result sets that are not tables are not supported(不支持不是表格的结果集)|LinkisStorageErrorCodeSummary|
+|linkis-storage |52001|Parsing metadata failed(解析元数据失败)|LinkisStorageErrorCodeSummary|
+|linkis-storage |65000|Invalid custom parameter(不合法的自定义参数)|LinkisStorageErrorCodeSummary|
+|linkis-storage |54001|Unsupported open file type(不支持打开的文件类型)|LinkisStorageErrorCodeSummary|
+|linkis-storage |50000|Unsupported file system type(不支持的文件系统类型):{}|LinkisStorageErrorCodeSummary|
+|linkis-storage |51000|failed to read integer(读取整数失败)|LinkisStorageErrorCodeSummary|
+|linkis-storage |50001|HDFS configuration was not read, please configure hadoop.config.dir or add env:HADOOP_CONF_DIR(HDFS 配置未读取,请配置 hadoop.config.dir 或添加 env:HADOOP_CONF_DIR)|LinkisStorageErrorCodeSummary|
+|linkis-storage |51001| |LinkisStorageErrorCodeSummary|
+|linkis-storage |53003|Unsupported parameter calls(不支持的参数调用)|LinkisIoFileErrorCodeSummary|
+|linkis-storage |52002|FS Can not proxy to:{}(FS 不能代理到:{}) |LinkisIoFileErrorCodeSummary|
+|linkis-storage |53003|not exists method {} in fs {}(方法不存在) |LinkisIoFileErrorCodeSummary|
+|linkis-storage |53002|The read method parameter cannot be empty(read方法参数不能为空)|LinkisIoFileErrorCodeSummary|
+|linkis-storage |52002|proxy user not set, can not get the permission information.(没有设置代理 proxy 用户,无法获取权限信息)|LinkisIoFileClientErrorCodeSummary|
+|linkis-storage |52002|Failed to init FS for user:(为用户初始化 FS 失败:)|LinkisIoFileClientErrorCodeSummary|
+|linkis-storage |52002|has been closed, IO operation was illegal.(已经关闭,IO操作是非法的.)"|LinkisIoFileClientErrorCodeSummary|
+|linkis-storage |52002|storage has been closed.(存储已关闭.)|LinkisIoFileClientErrorCodeSummary|
+|linkis-storage |53001|please init first(请先初始化)|StorageErrorCode|
diff --git a/docs/errorcode/sqoop-errorcode.md b/docs/errorcode/sqoop-errorcode.md
new file mode 100644
index 00000000000..62161bc0a84
--- /dev/null
+++ b/docs/errorcode/sqoop-errorcode.md
@@ -0,0 +1,10 @@
+## sqoop errorcode
+
+| 模块名(服务名) | 错误码 | 描述 | Exception Class|
+| -------- | -------- | ----- |-----|
+|sqoop|16025|Unable to close the mapReduce job related to cluster(无法关闭与集群相关的 mapReduce 作业)|SqoopErrorCodeSummary|
+|sqoop|16025|Error in closing sqoop client(关闭 sqoop 客户端时出错)|SqoopErrorCodeSummary|
+|sqoop|16023|Not support method for requestExpectedResource.(不支持 requestExpectedResource 的方法)|SqoopErrorCodeSummary|
+|sqoop|16023|Exec Sqoop Code Error(执行 Sqoop 代码错误)|SqoopErrorCodeSummary|
+|sqoop|16023|New a instance of {} failed!(新建 {} 实例失败!)|SqoopErrorCodeSummary|
+
diff --git a/docs/configuration/index.md b/docs/index.md
similarity index 58%
rename from docs/configuration/index.md
rename to docs/index.md
index 28254c2978a..192bc98f537 100644
--- a/docs/configuration/index.md
+++ b/docs/index.md
@@ -1,7 +1,9 @@
+EN: This docs folder is only used to temporarily store some version documents.
-For detailed documentation, please visit the official website
-https://linkis.apache.org/docs/latest/introduction
+For detailed documentation, please visit the official website
+
+------
+ZH: 这个docs文件夹,只是用来临时存放一些版本文档,
-详细文档 请到官网查看 https://linkis.apache.org/zh-CN/docs/latest/introduction/
\ No newline at end of file
+详细文档 请到官网查看 https://linkis.apache.org/zh-CN/docs/latest/introduction/
diff --git a/docs/configuration/info-1.1.3.md b/docs/info-1.1.3.md
similarity index 97%
rename from docs/configuration/info-1.1.3.md
rename to docs/info-1.1.3.md
index e0cdfc4e8e5..faca3daf204 100644
--- a/docs/configuration/info-1.1.3.md
+++ b/docs/info-1.1.3.md
@@ -1,5 +1,4 @@
-## 参数变化
-
+## 参数变化
 | 模块名(服务名)| 类型 | 参数名 | 默认值 | 描述 |
 | ----------- | ----- | -------------------------------------------------------- | ---------------- | ------------------------------------------------------- |
diff --git a/docs/info-1.2.1.md b/docs/info-1.2.1.md
new file mode 100644
index 00000000000..3bf27564f8a
--- /dev/null
+++ b/docs/info-1.2.1.md
@@ -0,0 +1,28 @@
+## 参数变化
+
+| 模块名(服务名)| 类型 | 参数名 | 默认值 | 描述 |
+| ----------- | ----- | -------------------------------------------------------- | ---------------- | ------------------------------------------------------- |
+|cg-engineplugin | 新增 | linkis.trino.default.limit | 5000 | Trino查询的结果集返回条数限制 |
+|cg-engineplugin | 新增 | linkis.trino.http.connectTimeout | 60 | 连接Trino服务器的超时时间 |
+|cg-engineplugin | 新增 | linkis.trino.http.readTimeout | 60 | 等待Trino服务器返回数据的超时时间 |
+|cg-engineplugin | 新增 | linkis.trino.resultSet.cache.max | 512k | Trino结果集缓冲区大小 |
+|cg-engineplugin | 新增 | linkis.trino.url | http://127.0.0.1:8080 | Trino服务器URL |
+|cg-engineplugin | 新增 | linkis.trino.user | null | 用于连接Trino查询服务的用户名 |
+|cg-engineplugin | 新增 | linkis.trino.password | null | 用于连接Trino查询服务的密码 |
+|cg-engineplugin | 新增 | linkis.trino.passwordCmd | null | 用于连接Trino查询服务的密码回调命令 |
+|cg-engineplugin | 新增 | linkis.trino.catalog | system | 连接Trino查询时使用的catalog |
+|cg-engineplugin | 新增 | linkis.trino.schema | | 连接Trino查询服务的默认schema |
+|cg-engineplugin | 新增 | linkis.trino.ssl.insecured | false | 是否忽略服务器的SSL证书 |
+|cg-engineplugin | 新增 | linkis.trino.ssl.keystore | null | keystore路径 |
+|cg-engineplugin | 新增 | linkis.trino.ssl.keystore.type | null | keystore类型 |
+|cg-engineplugin | 新增 | linkis.trino.ssl.keystore.password | null | keystore密码 |
+|cg-engineplugin | 新增 | linkis.trino.ssl.truststore | null | truststore路径 |
+|cg-engineplugin | 新增 | linkis.trino.ssl.truststore.type | null | truststore类型 |
+|cg-engineplugin | 新增 | linkis.trino.ssl.truststore.password | null | truststore密码 |
+
+## 特性说明
+
+| 模块名(服务名)| 类型 | 特性 |
+| ----------- | ---------------- | ------------------------------------------------------- |
+|linkis-metadata-query-service-mysql | 新增 | 基于mysql 模块融合dm,greenplum,kingbase,oracle,postgres,sqlserver ,协议和sql 区分开,metadata-query 反射多个数据源,基于mysql模块扩展,融合为一个模块。|
+|linkis-engineconn-plugins-trino | 新增 | 基于trino-client实现的Trino查询引擎。|
diff --git a/docs/trino-usage.md b/docs/trino-usage.md
new file mode 100644
index 00000000000..cfd199f8dba
--- /dev/null
+++ b/docs/trino-usage.md
@@ -0,0 +1,150 @@
+---
+title: Trino 引擎
+sidebar_position: 12
+---
+
+本文主要介绍在 Linkis 1.X 中,Trino 引擎的配置、部署和使用。
+
+## 1. 环境准备
+
+如果您希望在您的服务器上使用 Trino 引擎,您需要准备 Trino 服务并提供连接信息,如 Trino 集群的连接地址、用户名和密码等。
+
+## 2. 部署和配置
+
+### 2.1 版本的选择和编译
+注意: 编译 Trino 引擎之前需要进行 Linkis 项目全量编译。
+发布的安装部署包中默认不包含此引擎插件,
+你可以按[Linkis引擎安装指引](https://linkis.apache.org/zh-CN/blog/2022/04/15/how-to-download-engineconn-plugin)部署安装,或者按以下流程,手动编译部署。
+
+单独编译 Trino 引擎
+
+```
+cd ${linkis_code_dir}/linkis-engineconn-plugins/trino/
+mvn clean install
+```
+
+### 2.2 物料的部署和加载
+
+将 2.1 步编译出来的引擎包,位于
+```bash
+${linkis_code_dir}/linkis-engineconn-plugins/trino/target/out/trino
+```
+上传到服务器的引擎目录下
+```bash
+${LINKIS_HOME}/lib/linkis-engineplugins
+```
+并重启linkis-engineplugin(或者通过引擎接口进行刷新)
+```bash
+cd ${LINKIS_HOME}/sbin
+sh linkis-daemon.sh restart cg-engineplugin
+```
+### 2.3 引擎的标签
+
+Linkis 1.X 是通过标签来进行管理的,所以需要在我们数据库中插入数据,插入的方式如下文所示。
+
+管理台的配置是按照引擎标签来进行管理的,如果新增的引擎有配置参数需要配置的话,需要修改对应的表的元数据
+
+```
+linkis_ps_configuration_config_key: 插入引擎的配置参数的key和默认values
+linkis_cg_manager_label:插入引擎label如:hive-2.3.3
+linkis_ps_configuration_category: 插入引擎的目录关联关系
+linkis_ps_configuration_config_value: 插入引擎需要展示的配置
+linkis_ps_configuration_key_engine_relation:配置项和引擎的关联关系
+```
+
+```sql
+-- set variable
+SET @ENGINE_LABEL="trino-371";
+SET @ENGINE_IDE=CONCAT('*-IDE,',@ENGINE_LABEL);
+SET @ENGINE_ALL=CONCAT('*-*,',@ENGINE_LABEL);
+SET @ENGINE_NAME="trino";
+
+-- add trino engine to IDE
+insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', @ENGINE_ALL, 'OPTIONAL', 2, now(), now());
+insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', @ENGINE_IDE, 'OPTIONAL', 2, now(), now());
+select @label_id := id from `linkis_cg_manager_label` where label_value = @ENGINE_IDE;
+insert into `linkis_ps_configuration_category` (`label_id`, `level`) VALUES (@label_id, 2);
+
+-- insert configuration key
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.default.limit', '查询的结果集返回条数限制', '结果集条数限制', '5000', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.http.connectTimeout', '连接Trino服务器的超时时间', '连接超时时间(秒)', '60', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.http.readTimeout', '等待Trino服务器返回数据的超时时间', '传输超时时间(秒)', '60', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.resultSet.cache.max', 'Trino结果集缓冲区大小', '结果集缓冲区', '512k', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.url', 'Trino服务器URL', 'Trino服务器URL', 'http://127.0.0.1:8080', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.user', '用于连接Trino查询服务的用户名', '用户名', 'null', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.password', '用于连接Trino查询服务的密码', '密码', 'null', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.passwordCmd', '用于连接Trino查询服务的密码回调命令', '密码回调命令', 'null', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.catalog', '连接Trino查询时使用的catalog', 'Catalog', 'system', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.schema', '连接Trino查询服务的默认schema', 'Schema', '', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.ssl.insecured', '是否忽略服务器的SSL证书', '验证SSL证书', 'false', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.engineconn.concurrent.limit', '引擎最大并发', '引擎最大并发', '100', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.ssl.keystore', 'Trino服务器SSL keystore路径', 'keystore路径', 'null', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.ssl.keystore.type', 'Trino服务器SSL keystore类型', 'keystore类型', 'null', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.ssl.keystore.password', 'Trino服务器SSL keystore密码', 'keystore密码', 'null', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.ssl.truststore', 'Trino服务器SSL truststore路径', 'truststore路径', 'null', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.ssl.truststore.type', 'Trino服务器SSL truststore类型', 'truststore类型', 'null', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.ssl.truststore.password', 'Trino服务器SSL truststore密码', 'truststore密码', 'null', 'None', '', @ENGINE_NAME, 0, 0, 1, '数据源配置');
+
+
+-- trino engine -*
+insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
+(select config.id as config_key_id, label.id AS engine_type_label_id FROM `linkis_ps_configuration_config_key` config
+INNER JOIN `linkis_cg_manager_label` label ON config.engine_conn_type = @ENGINE_NAME and label_value = @ENGINE_ALL);
+
+-- trino engine default configuration
+insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
+(select relation.config_key_id AS config_key_id, '' AS config_value, relation.engine_type_label_id AS config_label_id FROM `linkis_ps_configuration_key_engine_relation` relation
+INNER JOIN `linkis_cg_manager_label` label ON relation.engine_type_label_id = label.id AND label.label_value = @ENGINE_ALL);
+
+```
+
+### 2.4 Trino 引擎相关配置
+
+| 配置 | 默认值 |是否必须 | 说明 |
+| ---------------------------------------| ----------------------|--------|---------------------------------------- |
+| linkis.trino.default.limit | 5000 | 是 | 查询的结果集返回条数限制 |
+| linkis.trino.http.connectTimeout | 60 | 是 | 连接Trino服务器的超时时间 |
+| linkis.trino.http.readTimeout | 60 | 是 | 等待Trino服务器返回数据的超时时间 |
+| linkis.trino.resultSet.cache.max | 512k | 是 | Trino结果集缓冲区大小 |
+| linkis.trino.url | http://127.0.0.1:8080 | 是 | Trino服务器URL |
+| linkis.trino.user | null | 否 | 用于连接Trino查询服务的用户名 |
+| linkis.trino.password | null | 否 | 用于连接Trino查询服务的密码 |
+| linkis.trino.passwordCmd | null | 否 | 用于连接Trino查询服务的密码回调命令 |
+| linkis.trino.catalog | system | 否 | 连接Trino查询时使用的catalog |
+| linkis.trino.schema | | 否 | 连接Trino查询服务的默认schema |
+| linkis.trino.ssl.insecured | false | 是 | 是否忽略服务器的SSL证书 |
+| linkis.engineconn.concurrent.limit | 100 | 否 | 引擎最大并发 |
+| linkis.trino.ssl.keystore | null | 否 | Trino服务器SSL keystore路径 |
+| linkis.trino.ssl.keystore.type | null | 否 | Trino服务器SSL keystore类型 |
+| linkis.trino.ssl.keystore.password | null | 否 | Trino服务器SSL keystore密码 |
+| linkis.trino.ssl.truststore | null | 否 | Trino服务器SSL truststore路径 |
+| linkis.trino.ssl.truststore.type | null | 否 | Trino服务器SSL truststore类型 |
+| linkis.trino.ssl.truststore.password | null | 否 | Trino服务器SSL truststore密码 |
+
+## 3. Trino 引擎的使用
+### 3.1 准备操作
+您需要配置Trino的连接信息,包括连接地址信息或用户名密码(如果启用)等信息。
+
+![Trino](https://user-images.githubusercontent.com/12693319/195242035-d7e22f2c-f116-46a8-b3c5-4e2dea2ce37d.png)
+
+图3-1 Trino配置信息
+
+您也可以在提交任务接口的 params.configuration.runtime 参数中进行修改
+```shell
+linkis.trino.url
+linkis.trino.user
+linkis.trino.password
+```
+
+### 3.2 通过Linkis-cli进行任务提交
+**使用示例**
+
+Linkis 1.0后提供了cli的方式提交任务,我们只需要指定对应的EngineConn标签类型即可,Trino的使用如下:
+
+```shell
+ sh ./bin/linkis-cli -submitUser trino -engineType trino-371 -code 'select * from default.test limit 10' -runtimeMap linkis.trino.url=http://127.0.0.1:8080
+```
+
+## 4. Trino引擎的用户设置
+
+Trino的用户设置主要是设置Trino的连接信息,建议用户对密码等敏感信息进行加密管理。
\ No newline at end of file
diff --git a/linkis-commons/linkis-common/pom.xml b/linkis-commons/linkis-common/pom.xml
index 888bb35e838..663951c2a7c 100644
--- a/linkis-commons/linkis-common/pom.xml
+++ b/linkis-commons/linkis-common/pom.xml
@@ -21,6 +21,7 @@
     org.apache.linkis
     linkis
     1.3.0
+    ../../pom.xml
   linkis-common
diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala
index e19a1b05120..928b7fb6418 100644
--- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala
+++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala
@@ -25,7 +25,7 @@ import org.apache.commons.lang3.StringUtils
 import java.io.{File, FileInputStream, InputStream, IOException}
 import java.util.Properties
-import scala.collection.JavaConverters.mapAsJavaMapConverter
+import scala.collection.JavaConverters._
 private[conf] object BDPConfiguration extends Logging {
@@ -127,13 +127,17 @@ private[conf] object BDPConfiguration extends Logging {
   def properties: Properties = {
     val props = new Properties
-    props.putAll(env.asJava)
-    props.putAll(sysProps.asJava)
-    props.putAll(config)
-    props.putAll(extractConfig)
+    mergePropertiesFromMap(props, env)
+    mergePropertiesFromMap(props, sysProps.toMap)
+    mergePropertiesFromMap(props, config.asScala.toMap)
+    mergePropertiesFromMap(props, extractConfig.asScala.toMap)
     props
   }
+  def mergePropertiesFromMap(props: Properties, mapProps: Map[String, String]): Unit = {
+    mapProps.foreach { case (k, v) => props.put(k, v) }
+  }
+
   def getOption[T](commonVars: CommonVars[T]): Option[T] = if (commonVars.value != null) {
     Option(commonVars.value)
   } else {
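Editor's note: the BDPConfiguration hunk above replaces `Properties.putAll` with an explicit per-entry merge. A minimal, self-contained Scala sketch of the helper's intended behavior — the `wds.linkis.test.key` entries are made up for illustration, and later merges deliberately overwrite earlier ones, which is how the patched `properties` method encodes precedence (env first, then system properties, then config files):

```scala
import java.util.Properties

object MergePropertiesSketch {

  // Mirrors the helper added in the patch: copy each entry into the
  // Properties object one by one instead of relying on Properties.putAll
  // over a converted Scala map.
  def mergePropertiesFromMap(props: Properties, mapProps: Map[String, String]): Unit = {
    mapProps.foreach { case (k, v) => props.put(k, v) }
  }

  def main(args: Array[String]): Unit = {
    val props = new Properties
    // A later merge overwrites an earlier one, so call order is precedence.
    mergePropertiesFromMap(props, Map("wds.linkis.test.key" -> "from-env"))
    mergePropertiesFromMap(props, Map("wds.linkis.test.key" -> "from-config"))
    println(props.getProperty("wds.linkis.test.key")) // prints: from-config
  }
}
```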
diff --git a/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/ClassUtilsTest.scala b/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/ClassUtilsTest.scala
index c915f037e7e..590ec8c1a30 100644
--- a/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/ClassUtilsTest.scala
+++ b/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/ClassUtilsTest.scala
@@ -17,6 +17,8 @@
 package org.apache.linkis.common.utils
+import org.apache.commons.lang3.StringUtils
+
 import java.util.Hashtable
 import org.junit.jupiter.api.Assertions._
@@ -25,12 +27,13 @@ class ClassUtilsTest {
   @Test private[utils] def testJarOfClass(): Unit = {
-    val hashTable = new Hashtable[String, String]()
-    val someClass = ClassUtils.jarOfClass(hashTable.getClass)
-    val uri = hashTable.getClass.getResource(
+
+    val someClass = ClassUtils.jarOfClass(classOf[StringUtils])
+    val uri = classOf[StringUtils].getResource(
       "/" +
-        hashTable.getClass.getName.replace('.', '/') + ".class"
+        classOf[StringUtils].getName.replace('.', '/') + ".class"
     )
+    println(s"StringUtils uri is $uri")
     assertEquals(
       Some(uri.toString.substring("jar:file:".length, uri.toString.indexOf("!"))),
       someClass
diff --git a/linkis-commons/linkis-hadoop-common/pom.xml b/linkis-commons/linkis-hadoop-common/pom.xml
index d5feb448791..965133a925c 100644
--- a/linkis-commons/linkis-hadoop-common/pom.xml
+++ b/linkis-commons/linkis-hadoop-common/pom.xml
@@ -21,6 +21,7 @@
     org.apache.linkis
     linkis
     1.3.0
+    ../../pom.xml
   linkis-hadoop-common
diff --git a/linkis-commons/linkis-httpclient/pom.xml b/linkis-commons/linkis-httpclient/pom.xml
index d134693f237..b6d8480fa4b 100644
--- a/linkis-commons/linkis-httpclient/pom.xml
+++ b/linkis-commons/linkis-httpclient/pom.xml
@@ -21,6 +21,7 @@
     org.apache.linkis
     linkis
     1.3.0
+    ../../pom.xml
   linkis-httpclient
diff --git a/linkis-commons/linkis-rpc/pom.xml b/linkis-commons/linkis-rpc/pom.xml
index 08495cd5eb5..e470ce399f6 100644
--- a/linkis-commons/linkis-rpc/pom.xml
+++ b/linkis-commons/linkis-rpc/pom.xml
@@ -21,6 +21,7 @@
     org.apache.linkis
     linkis
     1.3.0
+    ../../pom.xml
   linkis-rpc
diff --git a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/errorcode/LinkisRpcErrorCodeSummary.java b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/errorcode/LinkisRpcErrorCodeSummary.java
new file mode 100644
index 00000000000..2268c164d6b
--- /dev/null
+++ b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/errorcode/LinkisRpcErrorCodeSummary.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.rpc.errorcode;
+
+public enum LinkisRpcErrorCodeSummary {
+  TIMEOUT_PERIOD(
+      10002,
+      "The timeout period is not set!(超时时间未设置!)",
+      "The timeout period is not set!(超时时间未设置!)"),
+  CORRESPONDING_NOT_FOUND(
+      10003,
+      "The corresponding anti-sequence class $objectClass was not found:(找不到对应的反序列类:)",
+      "The corresponding anti-sequence class was not found.(找不到对应的反序列类 :)"),
+  CORRESPONDING_TO_INITIALIZE(
+      10004,
+      "The corresponding anti-sequence class failed to initialize:(对应的反序列类初始化失败:)",
+      "The corresponding anti-sequence class failed to initialize.(对应的反序列类 初始化失败:)"),
+  TRANSMITTED_BEAN_IS_NULL(
+      10001,
+      "The transmitted bean is Null.(传输的bean为Null.)",
+      "The transmitted bean is Null.(传输的bean为Null.)"),
+  APPLICATION_IS_NOT_EXISTS(
+      10051,
+      "The instance ## of application ### is not exists.(应用程序### 的实例## 不存在。)",
+      "The instance ## of application ### is not exists.(应用程序### 的实例## 不存在。)"),
+  METHON_CALL_FAILED(10000, "method call failed:(方法调用失败:)", "method call failed:(方法调用失败:)");
+
+  /** 错误码 */
+  private int errorCode;
+  /** 错误描述 */
+  private String errorDesc;
+  /** 错误可能出现的原因 */
+  private String comment;
+
+  LinkisRpcErrorCodeSummary(int errorCode, String errorDesc, String comment) {
+    this.errorCode = errorCode;
+    this.errorDesc = errorDesc;
+    this.comment = comment;
+  }
+
+  public int getErrorCode() {
+    return errorCode;
+  }
+
+  public void setErrorCode(int errorCode) {
+    this.errorCode = errorCode;
+  }
+
+  public String getErrorDesc() {
+    return errorDesc;
+  }
+
+  public void setErrorDesc(String errorDesc) {
+    this.errorDesc = errorDesc;
+  }
+
+  public String getComment() {
+    return comment;
+  }
+
+  public void setComment(String comment) {
+    this.comment = comment;
+  }
+
+  @Override
+  public String toString() {
+    return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc;
+  }
+}
diff --git a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/message/method/MessageExecutor.java b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/message/method/MessageExecutor.java
index 3790735dcd9..d1d18b5060c 100644
--- a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/message/method/MessageExecutor.java
+++ b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/message/method/MessageExecutor.java
@@ -32,6 +32,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import static org.apache.linkis.rpc.errorcode.LinkisRpcErrorCodeSummary.METHON_CALL_FAILED;
+
 public class MessageExecutor {
   private static final Logger logger = LoggerFactory.getLogger(MessageExecutor.class);
@@ -92,7 +94,7 @@ private Object executeOneMethod(
               ? "method call failed: "
                   + ((InvocationTargetException) t).getTargetException().getMessage()
               : "method call failed.";
-      throw new MessageWarnException(10000, errorMsg, t);
+      throw new MessageWarnException(METHON_CALL_FAILED.getErrorCode(), errorMsg, t);
     }
   }
   return result;
diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/AsynRPCMessageBus.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/AsynRPCMessageBus.scala
index 9313ad4b1a3..554c0c95bc3 100644
--- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/AsynRPCMessageBus.scala
+++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/AsynRPCMessageBus.scala
@@ -42,12 +42,12 @@ class AsynRPCMessageBus(capacity: Int, busName: String)(
   override protected val dropEvent: DropEvent = new DropEvent {
     override def onDropEvent(event: RPCMessageEvent): Unit = throw new DWCRPCRetryException(
-      "Asyn RPC Consumer Queue is full, please retry after some times."
+      "Async RPC Consumer Queue is full, please retry after some times."
     )
     override def onBusStopped(event: RPCMessageEvent): Unit = throw new RPCInitFailedException(
       RPCErrorConstants.RPC_INIT_ERROR,
-      "Asyn RPC Consumer Thread has stopped!"
+      "Async RPC Consumer Thread has stopped!"
     )
   }
diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/RPCReceiveRestful.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/RPCReceiveRestful.scala
index 4bc97e8d704..6e5046330a9 100644
--- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/RPCReceiveRestful.scala
+++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/RPCReceiveRestful.scala
@@ -24,6 +24,7 @@ import org.apache.linkis.rpc.conf.RPCConfiguration.{
   BDP_RPC_RECEIVER_ASYN_CONSUMER_THREAD_MAX,
   BDP_RPC_RECEIVER_ASYN_QUEUE_CAPACITY
 }
+import org.apache.linkis.rpc.errorcode.LinkisRpcErrorCodeSummary.TIMEOUT_PERIOD
 import org.apache.linkis.rpc.exception.DWCURIException
 import org.apache.linkis.rpc.transform.{RPCConsumer, RPCProduct}
 import org.apache.linkis.server.{catchIt, Message}
@@ -178,7 +179,7 @@ private[rpc] class RPCReceiveRestful extends RPCReceiveRemote with Logging {
   override def receiveAndReplyInMills(@RequestBody message: Message): Message = catchIt {
     val duration = message.getData.get("duration")
     if (duration == null || StringUtils.isEmpty(duration.toString)) {
-      throw new DWCURIException(10002, "The timeout period is not set!(超时时间未设置!)")
+      throw new DWCURIException(TIMEOUT_PERIOD.getErrorCode, TIMEOUT_PERIOD.getErrorDesc)
     }
     val timeout = Duration(duration.toString.toLong, TimeUnit.MILLISECONDS)
     receiveAndReplyWithMessage(message, _.receiveAndReply(_, timeout, _))
diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/Sender.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/Sender.scala
index 1ebdd0ec07f..f7bc861183b 100644
--- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/Sender.scala
+++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/Sender.scala
@@ -86,6 +86,8 @@ object Sender {
   def getSender(serviceInstance: ServiceInstance): Sender = {
     if (RPCUtils.isPublicService(serviceInstance.getApplicationName)) {
       serviceInstance.setApplicationName(RPCConfiguration.PUBLIC_SERVICE_APPLICATION_NAME.getValue)
+    } else if (RPCUtils.isLinkisManageMerged(serviceInstance.getApplicationName)) {
+      serviceInstance.setApplicationName(RPCConfiguration.LINKIS_MANAGER_APPLICATION_NAME.getValue)
     }
     if (!serviceInstanceToSenders.containsKey(serviceInstance)) {
       serviceInstanceToSenders synchronized {
diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala
index 25f2baece12..787f0a2951f 100644
--- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala
+++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala
@@ -51,6 +51,9 @@ object RPCConfiguration {
   val BDP_RPC_SENDER_ASYN_QUEUE_CAPACITY: CommonVars[Int] =
     CommonVars("wds.linkis.rpc.sender.asyn.queue.size.max", 2000)
+  val PUBLIC_SERVICE_APP_PREFIX: String =
+    CommonVars("wds.linkis.gateway.conf.publicservice.name", "linkis-ps-").getValue
+
   val ENABLE_PUBLIC_SERVICE: CommonVars[Boolean] =
     CommonVars("wds.linkis.gateway.conf.enable.publicservice", true)
@@ -59,9 +62,22 @@
   val PUBLIC_SERVICE_LIST: Array[String] = CommonVars(
     "wds.linkis.gateway.conf.publicservice.list",
-    "cs,contextservice,data-source-manager,metadataquery,metadatamanager,query,jobhistory,application,configuration,filesystem,udf,variable,microservice,errorcode,bml,datasource,basedata-manager"
+    "cs,contextservice,data-source-manager,metadataQuery,metadatamanager,query,jobhistory,application,configuration,filesystem,udf,variable,microservice,errorcode,bml,datasource,basedata-manager"
   ).getValue.split(",")
+  val COMPUTATION_GOVERNANCE_APP_PREFIX: String =
+    CommonVars("linkis.gateway.conf.app.cg.prefix", "linkis-cg-").getValue
+
+  val LINKIS_MANAGER_SERVICE_MERGED: CommonVars[Boolean] =
+    CommonVars("linkis.gateway.conf.app.merge.cg.manager", true)
+
+  val LINKIS_MANAGER_APPLICATION_NAME: CommonVars[String] =
+    CommonVars("linkis.gateway.conf.app.cg.manager.name", "linkis-cg-linkismanager")
+
+  val LINKIS_MANAGER_SERVICE_LIST: Array[String] =
+    CommonVars("linkis.gateway.conf.app.cg.manager.list", "linkisManager,engineplugin").getValue
+      .split(",")
+
   val METADATAQUERY_SERVICE_APPLICATION_NAME: CommonVars[String] =
     CommonVars("wds.linkis.gateway.conf.publicservice.name", "linkis-ps-metadataquery")
@@ -70,9 +86,6 @@
     "metadatamanager,metadataquery"
   ).getValue.split(",")
-  val PUBLIC_SERVICE_APP_PREFIX: String =
-    CommonVars("wds.linkis.gateway.conf.publicservice.name", "linkis-ps-").getValue
-
   val BDP_RPC_INSTANCE_ALIAS_SERVICE_REFRESH_INTERVAL: CommonVars[TimeType] =
     CommonVars("wds.linkis.rpc.instancealias.refresh.interval", new TimeType("3s"))
diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCServerLoader.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCServerLoader.scala
index 0c70393cbee..2a34fb34528 100644
--- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCServerLoader.scala
+++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCServerLoader.scala
@@ -19,6 +19,7 @@ package org.apache.linkis.rpc.interceptor
 import org.apache.linkis.common.ServiceInstance
 import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.rpc.errorcode.LinkisRpcErrorCodeSummary.APPLICATION_IS_NOT_EXISTS
 import org.apache.linkis.rpc.exception.NoInstanceExistsException
 import org.apache.linkis.rpc.sender.SpringCloudFeignConfigurationCache
@@ -61,7 +62,7 @@ abstract class AbstractRPCServerLoader extends RPCServerLoader with Logging {
       serviceInstance: ServiceInstance
   ): Unit = {
     val instanceNotExists = new NoInstanceExistsException(
-      10051,
+      APPLICATION_IS_NOT_EXISTS.getErrorCode,
       "The instance " + serviceInstance.getInstance + " of application " + serviceInstance.getApplicationName + " is not exists."
     )
diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCConsumer.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCConsumer.scala
index 3191da79a3b..06ec1441b2d 100644
--- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCConsumer.scala
+++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCConsumer.scala
@@ -19,6 +19,8 @@ package org.apache.linkis.rpc.transform
 import org.apache.linkis.common.exception.ExceptionManager
 import org.apache.linkis.common.utils.Utils
+import org.apache.linkis.rpc.errorcode.LinkisRpcErrorCodeSummary.CORRESPONDING_NOT_FOUND
+import org.apache.linkis.rpc.errorcode.LinkisRpcErrorCodeSummary.CORRESPONDING_TO_INITIALIZE
 import org.apache.linkis.rpc.exception.DWCURIException
 import org.apache.linkis.rpc.serializer.ProtostuffSerializeUtil
 import org.apache.linkis.server.{EXCEPTION_MSG, JMap, Message}
@@ -50,12 +52,12 @@
       val clazz = Utils.tryThrow(Class.forName(objectClass)) {
         case _: ClassNotFoundException =>
           new DWCURIException(
-            10003,
+            CORRESPONDING_NOT_FOUND.getErrorCode,
             s"The corresponding anti-sequence class $objectClass was not found.(找不到对应的反序列类$objectClass.)"
           )
         case t: ExceptionInInitializerError =>
           val exception = new DWCURIException(
-            10004,
+            CORRESPONDING_TO_INITIALIZE.getErrorCode,
             s"The corresponding anti-sequence class ${objectClass} failed to initialize.(对应的反序列类${objectClass}初始化失败.)"
           )
           exception.initCause(t)
diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCProduct.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCProduct.scala
index e244758a458..238e41dfd79 100644
--- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCProduct.scala
+++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCProduct.scala
@@ -20,6 +20,7 @@ package org.apache.linkis.rpc.transform
 import org.apache.linkis.DataWorkCloudApplication
 import org.apache.linkis.common.utils.Logging
 import org.apache.linkis.protocol.message.RequestProtocol
+import org.apache.linkis.rpc.errorcode.LinkisRpcErrorCodeSummary.TRANSMITTED_BEAN_IS_NULL
 import org.apache.linkis.rpc.errorcode.RPCErrorConstants
 import org.apache.linkis.rpc.exception.DWCURIException
 import org.apache.linkis.rpc.serializer.ProtostuffSerializeUtil
@@ -67,8 +68,12 @@ private[linkis] object RPCProduct extends Logging {
     }
     override def toMessage(t: Any): Message = {
-      if (t == null)
-        throw new DWCURIException(10001, "The transmitted bean is Null.(传输的bean为Null.)")
+      if (t == null) {
+        throw new DWCURIException(
+          TRANSMITTED_BEAN_IS_NULL.getErrorCode,
+          TRANSMITTED_BEAN_IS_NULL.getErrorDesc
+        )
+      }
       val message = Message.ok("RPC Message.")
       if (isRequestProtocol(t)) {
         message.data(IS_REQUEST_PROTOCOL_CLASS, "true")
diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/utils/RPCUtils.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/utils/RPCUtils.scala
index e7d48305acb..cec8f1d6ca9 100644
--- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/utils/RPCUtils.scala
+++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/utils/RPCUtils.scala
@@ -102,4 +102,18 @@ object RPCUtils {
     }
   }
+  def isLinkisManageMerged(appName: String): Boolean = {
+    if (!RPCConfiguration.LINKIS_MANAGER_SERVICE_MERGED.getValue || StringUtils.isBlank(appName)) {
+      return false
+    }
+    val appNameLower = appName.toLowerCase()
+    if (appNameLower.startsWith(RPCConfiguration.COMPUTATION_GOVERNANCE_APP_PREFIX)) {
+      val serviceName =
+        appNameLower.replaceFirst(RPCConfiguration.COMPUTATION_GOVERNANCE_APP_PREFIX, "")
+      RPCConfiguration.LINKIS_MANAGER_SERVICE_LIST.exists(_.equalsIgnoreCase(serviceName))
+    } else {
+      false
+    }
+  }
+
 }
diff --git a/linkis-commons/linkis-storage/pom.xml b/linkis-commons/linkis-storage/pom.xml
index 56ae1ab2aa8..43904772a09 100644
--- a/linkis-commons/linkis-storage/pom.xml
+++ b/linkis-commons/linkis-storage/pom.xml
@@ -22,6 +22,7 @@
     org.apache.linkis
     linkis
     1.3.0
+    ../../pom.xml
   linkis-storage
@@ -74,10 +75,22 @@
     spring-core
+
+    org.apache.poi
+    poi
+    ${poi.version}
+
+
+    org.apache.poi
+    poi-ooxml
+    ${poi.version}
+
     com.github.pjfanning
     excel-streaming-reader
-    4.0.1
+    4.0.2
diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisIoFileClientErrorCodeSummary.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisIoFileClientErrorCodeSummary.java
new file mode 100644
index 00000000000..6a26779867b
--- /dev/null
+++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisIoFileClientErrorCodeSummary.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.linkis.storage.errorcode; + +public enum LinkisIoFileClientErrorCodeSummary { + NO_PROXY_USER( + 52002, + "proxy user not set, can not get the permission information.(没有设置代理 proxy 用户,无法获取权限信息)", + "proxy user not set, can not get the permission information.(没有设置代理 proxy 用户,无法获取权限信息)"), + FAILED_TO_INIT_USER( + 52002, + "Failed to init FS for user:(为用户初始化 FS 失败:)", + "Failed to init FS for user:(为用户初始化 FS 失败:)"), + ENGINE_CLOSED_IO_ILLEGAL( + 52002, + "has been closed, IO operation was illegal.(已经关闭,IO操作是非法的.)", + "has been closed, IO operation was illegal.(已经关闭,IO操作是非法的.)"), + STORAGE_HAS_BEEN_CLOSED( + 52002, "storage has been closed.(存储已关闭.)", "storage has been closed.(存储已关闭.)"); + + /** 错误码 */ + private int errorCode; + /** 错误描述 */ + private String errorDesc; + /** 错误可能出现的原因 */ + private String comment; + + LinkisIoFileClientErrorCodeSummary(int errorCode, String errorDesc, String comment) { + this.errorCode = errorCode; + this.errorDesc = errorDesc; + this.comment = comment; + } + + public int getErrorCode() { + return errorCode; + } + + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + @Override + public String toString() { + return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc; + } +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisIoFileErrorCodeSummary.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisIoFileErrorCodeSummary.java new file mode 100644 index 00000000000..42710cb53b2 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisIoFileErrorCodeSummary.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.errorcode; + +public enum LinkisIoFileErrorCodeSummary { + CANNOT_BE_EMPTY( + 53002, + "The read method parameter cannot be empty(read方法参数不能为空)", + "The read method parameter cannot be empty(read方法参数不能为空)"), + FS_CAN_NOT_PROXY_TO( + 52002, "FS Can not proxy to:{}(FS 不能代理到:{})", "FS Can not proxy to:{}(FS 不能代理到:{})"), + NOT_EXISTS_METHOD( + 53003, "not exists method {} in fs {}(方法不存在)", "not exists method {} in fs {}(方法不存在)"), + PARAMETER_CALLS( + 53003, "Unsupported parameter calls(不支持的参数调用)", "Unsupported parameter calls(不支持的参数调用)"); + /** 错误码 */ + private int errorCode; + /** 错误描述 */ + private String errorDesc; + /** 错误可能出现的原因 */ + private String comment; + + LinkisIoFileErrorCodeSummary(int errorCode, String errorDesc, String comment) { + this.errorCode = errorCode; + this.errorDesc = errorDesc; + this.comment = comment; + } + + public int getErrorCode() { + return errorCode; + } + + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + @Override + public String toString() { + return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc; + } +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java new file mode 100644 index 00000000000..656f07270f2 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.storage.errorcode;
+
+public enum LinkisStorageErrorCodeSummary {
+  UNSUPPORTED_FILE(
+      50000,
+      "Unsupported file system type(不支持的文件系统类型):{}",
+      "Unsupported file system type(不支持的文件系统类型):{}"),
+  UNSUPPORTED_RESULT(
+      50000, "Unsupported result type(不支持的结果类型):{}", "Unsupported result type(不支持的结果类型):{}"),
+  CONFIGURATION_NOT_READ(
+      50001,
+      "HDFS configuration was not read, please configure hadoop.config.dir or add env:HADOOP_CONF_DIR(HDFS 配置未读取,请配置 hadoop.config.dir 或添加 env:HADOOP_CONF_DIR)",
+      "HDFS configuration was not read, please configure hadoop.config.dir or add env:HADOOP_CONF_DIR(HDFS 配置未读取,请配置 hadoop.config.dir 或添加 env:HADOOP_CONF_DIR)"),
+  FAILED_TO_READ_INTEGER(51000, "failed to read integer(读取整数失败)", "failed to read integer(读取整数失败)"),
+  THE_FILE_IS_EMPTY(51000, "The file{}is empty(文件{}为空)", "The file{}is empty(文件{}为空)"),
+  TO_BE_UNKNOW(51001, "", ""),
+  FSN_NOT_INIT_EXCEPTION(52000, "FSNotInitException", "FSNotInitException"),
+  PARSING_METADATA_FAILED(
+      52001, "Parsing metadata failed(解析元数据失败)", "Parsing metadata failed(解析元数据失败)"),
+  TABLE_ARE_NOT_SUPPORTED(
+      52002,
+      "Result sets that are not tables are not supported(不支持不是表格的结果集)",
+      "Result sets that are not tables are not supported(不支持不是表格的结果集)"),
+  MUST_REGISTER_TOC(
+      52004,
+      "You must register IOClient before you can use proxy mode.(必须先注册IOClient,才能使用代理模式)",
+      "You must register IOClient before you can use proxy mode.(必须先注册IOClient,才能使用代理模式)"),
+  MUST_REGISTER_TOM(
+      52004,
+      "You must register IOMethodInterceptorCreator before you can use proxy mode.(必须先注册IOMethodInterceptorCreator,才能使用代理模式)",
+      "You must register IOMethodInterceptorCreator before you can use proxy mode.(必须先注册IOMethodInterceptorCreator,才能使用代理模式)"),
+  UNSUPPORTED_OPEN_FILE_TYPE(
+      54001, "Unsupported open file type(不支持打开的文件类型)", "Unsupported open file type(不支持打开的文件类型)"),
+  INCALID_CUSTOM_PARAMETER(
+      65000, "Invalid custom parameter(不合法的自定义参数)", "Invalid custom parameter(不合法的自定义参数)");
+
+  /** 错误码 */
+  private int errorCode;
+  /** 错误描述 */
+  private String errorDesc;
+  /** 错误可能出现的原因 */
+  private String comment;
+
+  LinkisStorageErrorCodeSummary(int errorCode, String errorDesc, String comment) {
+    this.errorCode = errorCode;
+    this.errorDesc = errorDesc;
+    this.comment = comment;
+  }
+
+  public int getErrorCode() {
+    return errorCode;
+  }
+
+  public void setErrorCode(int errorCode) {
+    this.errorCode = errorCode;
+  }
+
+  public String getErrorDesc() {
+    return errorDesc;
+  }
+
+  public void setErrorDesc(String errorDesc) {
+    this.errorDesc = errorDesc;
+  }
+
+  public String getComment() {
+    return comment;
+  }
+
+  public void setComment(String comment) {
+    this.comment = comment;
+  }
+
+  @Override
+  public String toString() {
+    return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc;
+  }
+}
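The three new error-code enums above all follow the same shape: one constant per failure mode, carrying a numeric code plus a bilingual description, so throw sites stop inlining magic numbers such as 50000 or 52004. A minimal sketch of the calling pattern, modeled on the FSFactory hunk further down; the `requireKnownFsType` helper is hypothetical, while the `(code, message)` exception signature is the one used throughout these hunks:

```scala
import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.UNSUPPORTED_FILE
import org.apache.linkis.storage.exception.StorageFatalException

// Hypothetical guard: pull the code from the enum instead of
// hard-coding 50000 at the throw site.
def requireKnownFsType(fsName: String, known: Set[String]): Unit = {
  if (!known.contains(fsName)) {
    throw new StorageFatalException(
      UNSUPPORTED_FILE.getErrorCode,
      s"Unsupported file system type(不支持的文件系统类型):$fsName"
    )
  }
}
```

Keeping the code and the message in one enum per module means the error tables and the throw sites cannot drift apart.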
diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/FSNotInitException.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/FSNotInitException.java
index 885e6748de9..7c4d587640b 100644
--- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/FSNotInitException.java
+++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/FSNotInitException.java
@@ -19,10 +19,12 @@
 import org.apache.linkis.common.exception.WarnException;
 
+import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.FSN_NOT_INIT_EXCEPTION;
+
 public class FSNotInitException extends WarnException {
 
   public FSNotInitException() {
-    super(52000, "FSNotInitException");
+    super(FSN_NOT_INIT_EXCEPTION.getErrorCode(), FSN_NOT_INIT_EXCEPTION.getErrorDesc());
   }
 
   public FSNotInitException(int errCode, String desc) {
diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java
index 8781ec0c4f5..76116100767 100644
--- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java
+++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java
@@ -20,7 +20,7 @@ public enum StorageErrorCode {
 
   /** */
-  FS_NOT_INIT(53001, "please init first");
+  FS_NOT_INIT(53001, "please init first(请先初始化)");
 
   StorageErrorCode(int errorCode, String message) {
     this.code = errorCode;
diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java
index 82975d541f8..d7452aa8e77 100644
--- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java
+++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java
@@ -57,6 +57,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.TO_BE_UNKNOW;
+
 public class LocalFileSystem extends FileSystem {
 
   private static final Logger LOG = LoggerFactory.getLogger(LocalFileSystem.class);
@@ -233,7 +235,7 @@ public boolean setPermission(FsPath dest, String permission) throws IOException
     } catch (NoSuchFileException e) {
       LOG.warn("File or folder does not exist or file name is garbled(文件或者文件夹不存在或者文件名乱码)", e);
-      throw new StorageWarnException(51001, e.getMessage());
+      throw new StorageWarnException(TO_BE_UNKNOW.getErrorCode(), e.getMessage());
     }
     return true;
   }
@@ -326,7 +328,7 @@ public FsPath get(String dest) throws IOException {
       attr = Files.readAttributes(Paths.get(fsPath.getPath()), PosixFileAttributes.class);
     } catch (NoSuchFileException e) {
       LOG.warn("File or folder does not exist or file name is garbled(文件或者文件夹不存在或者文件名乱码)", e);
-      throw new StorageWarnException(51001, e.getMessage());
+      throw new StorageWarnException(TO_BE_UNKNOW.getErrorCode(), e.getMessage());
     }
     fsPath.setIsdir(attr.isDirectory());
@@ -441,6 +443,7 @@ public boolean renameTo(FsPath oldDest, FsPath newDest) throws IOException {
       throw new IOException("only owner can rename path " + path);
   }
 
+  @Override
   public void close() throws IOException {}
 
   /** Utils method start */
@@ -461,7 +464,8 @@ private boolean can(
       return true;
     }
     String pathGroup = attr.group().getName();
-    if ((pathGroup.equals(user) || group.contains(pathGroup))
+    LOG.debug("pathGroup: {}, group: {}, permissions: {}", pathGroup, group, permissions);
+    if ((pathGroup.equals(user) || (group != null && group.contains(pathGroup)))
         && permissions.contains(groupPermission)) {
       return true;
     } else if (permissions.contains(otherPermission)) {
diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/FSFactory.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/FSFactory.scala
index c61d33cfe32..dd5d9d80629 100644
--- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/FSFactory.scala
+++ 
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/FSFactory.scala @@ -19,6 +19,7 @@ package org.apache.linkis.storage import org.apache.linkis.common.io.{Fs, FsPath} import org.apache.linkis.common.utils.Logging +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.UNSUPPORTED_FILE import org.apache.linkis.storage.exception.StorageFatalException import org.apache.linkis.storage.factory.BuildFactory import org.apache.linkis.storage.utils.{StorageConfiguration, StorageUtils} @@ -32,7 +33,10 @@ object FSFactory extends Logging { def getBuildFactory(fsName: String): BuildFactory = { if (!buildClasses.contains(fsName)) { - throw new StorageFatalException(50000, s"Unsupported file system type(不支持的文件系统类型):$fsName") + throw new StorageFatalException( + UNSUPPORTED_FILE.getErrorCode, + s"Unsupported file system type(不支持的文件系统类型):$fsName" + ) } buildClasses(fsName) } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala index be9656eb1c2..378c2c2ecb7 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala @@ -18,6 +18,7 @@ package org.apache.linkis.storage.domain import org.apache.linkis.common.utils.Logging +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.FAILED_TO_READ_INTEGER import org.apache.linkis.storage.exception.StorageWarnException import org.apache.linkis.storage.utils.{StorageConfiguration, StorageUtils} @@ -67,7 +68,10 @@ object Dolphin extends Logging { def readInt(inputStream: InputStream): Int = { val bytes = new Array[Byte](INT_LEN + 1) if (StorageUtils.readBytes(inputStream, bytes, INT_LEN) != INT_LEN) { - throw new StorageWarnException(51000, "failed to read integer(读取整数失败)") + throw new StorageWarnException( + FAILED_TO_READ_INTEGER.getErrorCode, + FAILED_TO_READ_INTEGER.getErrorDesc + ) } getString(bytes, 0, INT_LEN).toInt } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOClient.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOClient.scala index 0457b8a1f85..b21bf7e4929 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOClient.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOClient.scala @@ -18,6 +18,7 @@ package org.apache.linkis.storage.io import org.apache.linkis.storage.domain.MethodEntity +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.MUST_REGISTER_TOC import org.apache.linkis.storage.exception.StorageErrorException import java.util.UUID @@ -50,8 +51,8 @@ object IOClient { def getIOClient(): IOClient = { if (ioClient == null) { throw new StorageErrorException( - 52004, - "You must register IOClient before you can use proxy mode.(必须先注册IOClient,才能使用代理模式)" + MUST_REGISTER_TOC.getErrorCode, + MUST_REGISTER_TOC.getErrorDesc ) } ioClient diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOMethodInterceptorCreator.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOMethodInterceptorCreator.scala index ec298b5175b..51e1589eb78 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOMethodInterceptorCreator.scala +++ 
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOMethodInterceptorCreator.scala @@ -17,6 +17,7 @@ package org.apache.linkis.storage.io +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.MUST_REGISTER_TOM import org.apache.linkis.storage.exception.StorageErrorException import org.springframework.cglib.proxy.MethodInterceptor @@ -42,8 +43,8 @@ object IOMethodInterceptorCreator { def getIOMethodInterceptor(fsName: String): MethodInterceptor = { if (interceptorCreator == null) { throw new StorageErrorException( - 52004, - "You must register IOMethodInterceptorCreator before you can use proxy mode.(必须先注册IOMethodInterceptorCreator,才能使用代理模式)" + MUST_REGISTER_TOM.getErrorCode, + MUST_REGISTER_TOM.getErrorDesc ) } interceptorCreator.createIOMethodInterceptor(fsName) diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala index f9fd3c9dcf9..78932115cf7 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala @@ -22,6 +22,10 @@ import org.apache.linkis.common.io.resultset.ResultSet import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.storage.FSFactory import org.apache.linkis.storage.domain.Dolphin +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.{ + THE_FILE_IS_EMPTY, + UNSUPPORTED_RESULT +} import org.apache.linkis.storage.exception.{StorageErrorException, StorageWarnException} import org.apache.linkis.storage.utils.{StorageConfiguration, StorageUtils} @@ -43,7 +47,10 @@ class DefaultResultSetFactory extends ResultSetFactory with Logging { override def getResultSetByType(resultSetType: String): ResultSet[_ <: MetaData, _ <: Record] = { if (!resultClasses.contains(resultSetType)) { - throw new StorageErrorException(50000, s"Unsupported result type(不支持的结果类型):$resultSetType") + throw new StorageErrorException( + UNSUPPORTED_RESULT.getErrorCode, + s"Unsupported result type(不支持的结果类型):$resultSetType" + ) } resultClasses(resultSetType).newInstance() } @@ -78,7 +85,7 @@ class DefaultResultSetFactory extends ResultSetFactory with Logging { val resultSetType = Dolphin.getType(inputStream) if (StringUtils.isEmpty(resultSetType)) { throw new StorageWarnException( - 51000, + THE_FILE_IS_EMPTY.getErrorCode, s"The file (${fsPath.getPath}) is empty(文件(${fsPath.getPath}) 为空)" ) } @@ -99,7 +106,7 @@ class DefaultResultSetFactory extends ResultSetFactory with Logging { val resultSetType = Dolphin.getType(inputStream) if (StringUtils.isEmpty(resultSetType)) { throw new StorageWarnException( - 51000, + THE_FILE_IS_EMPTY.getErrorCode, s"The file (${fsPath.getPath}) is empty(文件(${fsPath.getPath}) 为空)" ) } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala index 3b1c4ad2b64..663e379b5b6 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala @@ -20,6 +20,7 @@ package org.apache.linkis.storage.resultset import 
org.apache.linkis.common.io.{FsPath, MetaData, Record} import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetReader} import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED import org.apache.linkis.storage.exception.StorageErrorException import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord, TableResultSet} @@ -67,8 +68,8 @@ object ResultSetReader { val resultSet = rsFactory.getResultSet(res) if (ResultSetFactory.TABLE_TYPE != resultSet.resultSetType()) { throw new StorageErrorException( - 52002, - "Result sets that are not tables are not supported(不支持不是表格的结果集)" + TABLE_ARE_NOT_SUPPORTED.getErrorCode, + TABLE_ARE_NOT_SUPPORTED.getErrorDesc ) } ResultSetReader.getResultSetReader(resultSet.asInstanceOf[TableResultSet], res) @@ -77,8 +78,8 @@ object ResultSetReader { val resultSet = rsFactory.getResultSetByPath(resPath) if (ResultSetFactory.TABLE_TYPE != resultSet.resultSetType()) { throw new StorageErrorException( - 52002, - "Result sets that are not tables are not supported(不支持不是表格的结果集)" + TABLE_ARE_NOT_SUPPORTED.getErrorCode, + TABLE_ARE_NOT_SUPPORTED.getErrorDesc ) } val fs = FSFactory.getFs(resPath) diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala index 07612ef7e4c..40c4e031f7a 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala @@ -19,6 +19,7 @@ package org.apache.linkis.storage.resultset.table import org.apache.linkis.common.io.resultset.ResultDeserializer import org.apache.linkis.storage.domain.{Column, DataType, Dolphin} +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.PARSING_METADATA_FAILED import org.apache.linkis.storage.exception.StorageErrorException import scala.collection.mutable.ArrayBuffer @@ -38,7 +39,10 @@ class TableResultDeserializer extends ResultDeserializer[TableMetaData, TableRec } else colString.split(Dolphin.COL_SPLIT) var index = Dolphin.INT_LEN + colByteLen if (colArray.length % 3 != 0) { - throw new StorageErrorException(52001, "Parsing metadata failed(解析元数据失败)") + throw new StorageErrorException( + PARSING_METADATA_FAILED.getErrorCode, + PARSING_METADATA_FAILED.getErrorDesc + ) } val columns = new ArrayBuffer[Column]() for (i <- 0 until (colArray.length, 3)) { diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala index ca89bd2f846..d96c52df9ae 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala @@ -68,30 +68,32 @@ object VariableParser { import scala.collection.JavaConverters._ val vars = new util.HashMap[String, String] val confs = new util.HashMap[String, Object] - variables.filter(_.sort == null).foreach(f => vars.asScala += f.key -> f.value) + variables.filter(_.sort == null).foreach(f => vars.put(f.key, f.value)) variables.filter(_.sort != null).foreach { f => f.sort match { case STARTUP | RUNTIME | 
SPECIAL => if (confs.get(f.sort) == null) { - confs.asScala += f.sort -> createMap(f) + confs.put(f.sort, createMap(f)) } else { - confs.get(f.sort).asInstanceOf[util.HashMap[String, Object]].asScala += f.key -> f.value + confs.get(f.sort).asInstanceOf[util.HashMap[String, Object]].put(f.key, f.value) } case _ => if (confs.get(f.sortParent) == null) { - confs.asScala += f.sortParent -> new util.HashMap[String, Object] + confs.put(f.sortParent, new util.HashMap[String, Object]) confs .get(f.sortParent) .asInstanceOf[util.HashMap[String, Object]] - .asScala += f.sort -> createMap(f) + .put(f.sort, createMap(f)) } else { val subMap = confs.get(f.sortParent).asInstanceOf[util.HashMap[String, Object]] - if (subMap.get(f.sort) == null) subMap.asScala += f.sort -> createMap(f) - else + if (subMap.get(f.sort) == null) { + subMap.put(f.sort, createMap(f)) + } else { subMap .get(f.sort) .asInstanceOf[util.HashMap[String, Object]] - .asScala += f.key -> f.value + .put(f.key, f.value) + } } } } @@ -101,39 +103,10 @@ object VariableParser { params } - import scala.collection.JavaConverters._ - private def createMap(variable: Variable): util.Map[String, Object] = { - val map = new util.HashMap[String, Object] - map.asScala += variable.key -> variable.value - }.asJava - - /* def main(args: Array[String]): Unit = { - val a = Array( - Variable("variable", null, "a", "b"), - Variable("variable", null, "a1", "b1"), - Variable("configuration", "startup", "e", "f"), - Variable("configuration", "runtime", "a", "b"), - Variable("runtime", "env", "g2", "h3"), - Variable("startup", "hello", "g2", "h3")) - // val a = Array[Variable]() - // println(new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").serializeNulls.create.toJson(getMap(a))) - // val variables: Array[Variable] = getVariables(getMap(a)) - // val variables: Array[Variable] = getVariables(getMap(a)) - // print(variables) - /* val writer = ScriptFsWriter.getScriptFsWriter(new FsPath("file:///tmp/hello.py"), "utf-8", new FileOutputStream("E:\\aaa.py")) - - writer.addMetaData(new ScriptMetaData(a)) - writer.addRecord(new ScriptRecord("hello")) - writer.addRecord(new ScriptRecord("hello")) - writer.addRecord(new ScriptRecord("hello")) - writer.addRecord(new ScriptRecord("hello"))*/ - val reader = ScriptFsReader.getScriptFsReader(new FsPath("file:///tmp/aaa.py"),"utf-8",new FileInputStream("E:\\aaa.py")) - reader.getMetaData.asInstanceOf[ScriptMetaData].getMetaData - val map = getMap(reader.getMetaData.asInstanceOf[ScriptMetaData].getMetaData) - println(new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").serializeNulls.create.toJson(map)) - print(reader.getRecord.asInstanceOf[ScriptRecord].getLine) - } */ + map.put(variable.key, variable.value) + map + } } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/CommonScriptParser.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/CommonScriptParser.scala index 711b443152f..5d480eaeb6b 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/CommonScriptParser.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/CommonScriptParser.scala @@ -17,6 +17,7 @@ package org.apache.linkis.storage.script.parser +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.INCALID_CUSTOM_PARAMETER import org.apache.linkis.storage.exception.StorageErrorException import org.apache.linkis.storage.script.{Parser, Variable, 
VariableParser} @@ -31,15 +32,24 @@ abstract class CommonScriptParser extends Parser { case _ => val split = line.split("=") if (split.length != 2) { - throw new StorageErrorException(65000, "Invalid custom parameter(不合法的自定义参数)") + throw new StorageErrorException( + INCALID_CUSTOM_PARAMETER.getErrorCode(), + INCALID_CUSTOM_PARAMETER.getErrorDesc + ) } val value = split(1).trim val subSplit = split(0).split(" ") if (subSplit.filter(_ != "").size != 4) { - throw new StorageErrorException(65000, "Invalid custom parameter(不合法的自定义参数)") + throw new StorageErrorException( + INCALID_CUSTOM_PARAMETER.getErrorCode(), + INCALID_CUSTOM_PARAMETER.getErrorDesc + ) } if (!subSplit.filter(_ != "")(0).equals(prefixConf)) { - throw new StorageErrorException(65000, "Invalid custom parameter(不合法的自定义参数)") + throw new StorageErrorException( + INCALID_CUSTOM_PARAMETER.getErrorCode(), + INCALID_CUSTOM_PARAMETER.getErrorDesc + ) } val sortParent = subSplit.filter(_ != "")(1).trim val sort = subSplit.filter(_ != "")(2).trim diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala index 84fcc9a60fc..7a9fa4f04c7 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala @@ -19,6 +19,7 @@ package org.apache.linkis.storage.source import org.apache.linkis.common.io._ import org.apache.linkis.storage.conf.LinkisStorageConf +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE import org.apache.linkis.storage.exception.StorageErrorException import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetReader} import org.apache.linkis.storage.script.ScriptFsReader @@ -89,8 +90,12 @@ object FileSource { } def create(fsPath: FsPath, fs: Fs): FileSource = { - if (!canRead(fsPath.getPath)) - throw new StorageErrorException(54001, "Unsupported open file type(不支持打开的文件类型)") + if (!canRead(fsPath.getPath)) { + throw new StorageErrorException( + UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode, + UNSUPPORTED_OPEN_FILE_TYPE.getErrorDesc + ) + } if (isResultSet(fsPath)) { new ResultsetFileSource(Array(createResultSetFileSplit(fsPath, fs))) } else { @@ -99,8 +104,12 @@ object FileSource { } def create(fsPath: FsPath, is: InputStream): FileSource = { - if (!canRead(fsPath.getPath)) - throw new StorageErrorException(54001, "Unsupported open file type(不支持打开的文件类型)") + if (!canRead(fsPath.getPath)) { + throw new StorageErrorException( + UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode, + UNSUPPORTED_OPEN_FILE_TYPE.getErrorDesc + ) + } if (isResultSet(fsPath)) { new ResultsetFileSource(Array(createResultSetFileSplit(fsPath, is))) } else { diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala index 0c7e3e7a11e..358206357c7 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala @@ -63,7 +63,7 @@ class FileSplit( } def addParams(key: String, value: String): Unit = { - this.params.asScala += key -> value + this.params.put(key, value) } def `while`[M](m: MetaData => M, r: Record => Unit): M = { diff --git 
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala index d025ae66474..90eb319fa01 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala @@ -21,6 +21,7 @@ import org.apache.linkis.common.io.{Fs, FsPath} import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.hadoop.common.conf.HadoopConf import org.apache.linkis.storage.{LineMetaData, LineRecord} +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.CONFIGURATION_NOT_READ import org.apache.linkis.storage.exception.StorageFatalException import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetReader, ResultSetWriter} @@ -180,8 +181,8 @@ object StorageUtils extends Logging { // TODO IO-client mode need return false if (!confPath.exists() || confPath.isFile) { throw new StorageFatalException( - 50001, - "HDFS configuration was not read, please configure hadoop.config.dir or add env:HADOOP_CONF_DIR" + CONFIGURATION_NOT_READ.getErrorCode, + CONFIGURATION_NOT_READ.getErrorDesc ) } else true } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/assembly/distribution.xml b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/assembly/distribution.xml index bbc634bd2b9..a1cf04696a8 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/assembly/distribution.xml +++ b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - - + + linkis-cli dir diff --git a/linkis-computation-governance/linkis-client/linkis-cli/pom.xml b/linkis-computation-governance/linkis-client/linkis-cli/pom.xml index 73758edd4f3..f2331505c20 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/pom.xml +++ b/linkis-computation-governance/linkis-client/linkis-cli/pom.xml @@ -22,6 +22,7 @@ org.apache.linkis linkis 1.3.0 + ../../../pom.xml linkis-cli pom diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/pom.xml b/linkis-computation-governance/linkis-client/linkis-computation-client/pom.xml index 9be30ea3955..4f220d8acc2 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/pom.xml +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/pom.xml @@ -22,6 +22,7 @@ org.apache.linkis linkis 1.3.0 + ../../../pom.xml linkis-computation-client diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/image/ShowImage.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/image/ShowImage.scala index 74bb5238de4..9e83d614344 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/image/ShowImage.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/image/ShowImage.scala @@ -17,7 +17,7 @@ package org.apache.linkis.ujes.client.response.image -import javax.swing.{ImageIcon, JFrame} +import javax.swing.{ImageIcon, JFrame, WindowConstants} import java.util import java.util.Base64 @@ -82,7 +82,7 @@ object ShowImage { val frame = new JFrame frame.setContentPane(new ImagePanel(new ImageIcon(data).getImage)) - frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE) + frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE) frame.setBounds(100, 100, width, height) frame.setVisible(true) diff --git a/linkis-computation-governance/linkis-computation-governance-common/pom.xml b/linkis-computation-governance/linkis-computation-governance-common/pom.xml index 9397de13c29..f83b8918eea 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/pom.xml +++ b/linkis-computation-governance/linkis-computation-governance-common/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../pom.xml linkis-computation-governance-common diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/exception/engineconn/EngineConnExecutorErrorCode.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/exception/engineconn/EngineConnExecutorErrorCode.java similarity index 95% rename from linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/exception/engineconn/EngineConnExecutorErrorCode.java rename to linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/exception/engineconn/EngineConnExecutorErrorCode.java index 8e53c46e235..c5194539670 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/exception/engineconn/EngineConnExecutorErrorCode.java +++ 
b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/exception/engineconn/EngineConnExecutorErrorCode.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.governance.exception.engineconn; +package org.apache.linkis.governance.common.exception.engineconn; /** * ErrorCode of Engine start with 40000 diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/exception/engineconn/EngineConnExecutorErrorException.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/exception/engineconn/EngineConnExecutorErrorException.java similarity index 95% rename from linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/exception/engineconn/EngineConnExecutorErrorException.java rename to linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/exception/engineconn/EngineConnExecutorErrorException.java index 2f0983df778..9eea943fa06 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/exception/engineconn/EngineConnExecutorErrorException.java +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/exception/engineconn/EngineConnExecutorErrorException.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.governance.exception.engineconn; +package org.apache.linkis.governance.common.exception.engineconn; import org.apache.linkis.common.exception.ErrorException; diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/pom.xml b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/pom.xml index d99723c92ac..3052dcc7cbf 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/pom.xml +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/pom.xml @@ -21,7 +21,7 @@ org.apache.linkis linkis 1.3.0 - ../../pom.xml + ../../../pom.xml linkis-engineconn-manager-core diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala index a9fa8ea933a..eeb976bfa38 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala @@ -66,7 +66,7 @@ abstract class ShellProcessEngineCommandBuilder extends ProcessEngineCommandBuil class UnixProcessEngineCommandBuilder extends ShellProcessEngineCommandBuilder { - newLine("#!/bin/bash") + newLine("#!/usr/bin/env bash") if (ECPCoreConf.CORE_DUMP_DISABLE) { newLine("ulimit -c 0") diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml 
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml index f7539768d55..c48fdc0b080 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml @@ -21,7 +21,7 @@ org.apache.linkis linkis 1.3.0 - ../../pom.xml + ../../../pom.xml linkis-engineconn-manager-server diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml index 7e5d90e65fa..da4e71662bb 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - + + linkis-engineconn-manager dir diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala index 7d1190d919c..c67a3cc197f 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala @@ -18,6 +18,7 @@ package org.apache.linkis.ecm.server.service.impl import org.apache.linkis.DataWorkCloudApplication +import org.apache.linkis.common.conf.Configuration import org.apache.linkis.common.io.FsPath import org.apache.linkis.common.utils.{Logging, Utils, ZipUtils} import org.apache.linkis.ecm.core.conf.ECMErrorCode @@ -35,6 +36,8 @@ import org.apache.linkis.storage.FSFactory import org.apache.linkis.storage.fs.FileSystem import org.apache.linkis.storage.utils.{FileSystemUtils, StorageUtils} +import org.springframework.core.env.Environment + import java.io.File import java.nio.file.Paths @@ -54,9 +57,14 @@ class BmlResourceLocalizationService extends ResourceLocalizationService with Lo private var localDirsHandleService: LocalDirsHandleService = _ + private var springEnv: Environment = _ + def setLocalDirsHandleService(localDirsHandleService: LocalDirsHandleService): Unit = this.localDirsHandleService = localDirsHandleService + def setSpringEnv(springEnv: Environment): Unit = + this.springEnv = springEnv + override def handleInitEngineConnResources( request: EngineConnLaunchRequest, engineConn: EngineConn @@ -87,7 +95,19 @@ class BmlResourceLocalizationService extends ResourceLocalizationService with Lo override val engineConnWorkDir: String = workDir override val engineConnLogDirs: String = logDirs override val engineConnTempDirs: String = tmpDirs - override val engineConnManagerHost: String = Utils.getComputerName + override val engineConnManagerHost: String = { + var hostName = Utils.getComputerName + val eurekaPreferIp = 
Configuration.EUREKA_PREFER_IP
+        logger.info("eurekaPreferIp:" + eurekaPreferIp)
+        if (eurekaPreferIp) {
+          hostName = springEnv.getProperty("spring.cloud.client.ip-address")
+          logger.info("hostName:" + hostName)
+          logger.info(
+            "using ip address to replace hostname, because eureka.instance.prefer-ip-address: " + eurekaPreferIp
+          )
+        }
+        hostName
+      }
       override val engineConnManagerPort: String =
         DataWorkCloudApplication.getApplicationContext.getEnvironment.getProperty(
           "server.port"
         )
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala
index e74f041a802..4a684bbec1d 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala
@@ -22,12 +22,17 @@
 import org.apache.linkis.ecm.server.context.{DefaultECMContext, ECMContext}
 import org.apache.linkis.ecm.server.service._
 import org.apache.linkis.ecm.server.service.impl._
 
+import org.springframework.beans.factory.annotation.Autowired
 import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean
 import org.springframework.context.annotation.{Bean, Configuration}
+import org.springframework.core.env.Environment
 
 @Configuration
 class ECMSpringConfiguration {
 
+  @Autowired
+  private var env: Environment = _;
+
   @Bean
   @ConditionalOnMissingBean
   def getDefaultEngineConnManagerContext: ECMContext = {
@@ -48,6 +53,7 @@
   ): ResourceLocalizationService = {
     val service: BmlResourceLocalizationService = new BmlResourceLocalizationService
     service.setLocalDirsHandleService(localDirsHandleService)
+    service.setSpringEnv(env)
     service
   }
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala
index f5c936114a6..7917faf409f 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala
@@ -46,9 +46,10 @@
     }
     if (!response.isSuccess) throw new ECMErrorException(911115, "failed to downLoad(下载失败)")
     val map = new util.HashMap[String, Object]
-    map.asScala += "path" -> response.fullFilePath
-    map.asScala += "is" -> response.inputStream
-  }.asJava
+    map.put("path", response.fullFilePath)
+    map.put("is", response.inputStream)
+    map
+  }
 
   def downLoadBmlResourceToLocal(resource: BmlResource, userName: String, path: String)(implicit fs: FileSystem
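The BmlResourceLocalizationService hunk above changes which address the ECM advertises to EngineConns: when eureka.instance.prefer-ip-address is enabled, the Spring Cloud client IP replaces the machine hostname, so callbacks go to the address that was actually registered with Eureka. Condensed into a hedged sketch (`resolveEcmHost` is a hypothetical name; the property key and `Utils.getComputerName` come from the hunks themselves):

```scala
import org.apache.linkis.common.utils.Utils
import org.springframework.core.env.Environment

// Pick the address to advertise: the Spring Cloud client IP when
// eureka.instance.prefer-ip-address is enabled, the hostname otherwise.
def resolveEcmHost(preferIp: Boolean, env: Environment): String =
  if (preferIp) env.getProperty("spring.cloud.client.ip-address")
  else Utils.getComputerName
```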
diff --git a/linkis-computation-governance/linkis-engineconn-manager/pom.xml
index a6136daf144..0d9d67dea32 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn-manager/pom.xml
@@ -21,6 +21,7 @@
 org.apache.linkis linkis 1.3.0
+../../pom.xml
 linkis-engineconn-manager
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/pom.xml
index b05fdba1874..06381e0c8f7 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/pom.xml
@@ -21,6 +21,7 @@
 org.apache.linkis linkis 1.3.0
+../../../../pom.xml
 linkis-once-engineconn
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-streaming-engineconn/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-streaming-engineconn/pom.xml
index f00f34e46f5..fe881cf37b3 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-streaming-engineconn/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-streaming-engineconn/pom.xml
@@ -23,6 +23,7 @@
 linkis org.apache.linkis 1.3.0
+../../../../pom.xml
 4.0.0
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml
index dd86edc26e5..d7cd8ac7d2c 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml
@@ -21,7 +21,7 @@
 org.apache.linkis linkis 1.3.0
-../../pom.xml
+../../../pom.xml
 linkis-computation-engineconn
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala
index e54f6759737..08124f2225a 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala
@@ -36,7 +36,7 @@ import org.apache.linkis.engineconn.executor.listener.{
   EngineConnSyncListenerBus,
   ExecutorListenerBusContext
 }
-import org.apache.linkis.governance.exception.engineconn.EngineConnExecutorErrorException
+import org.apache.linkis.governance.common.exception.engineconn.EngineConnExecutorErrorException
 import org.apache.linkis.protocol.engine.JobProgressInfo
 import org.apache.linkis.scheduler.executer.{AliasOutputExecuteResponse, OutputExecuteResponse}
 import org.apache.linkis.storage.{LineMetaData, LineRecord}
@@ -83,6 +83,10 @@
     })
   }
 
+  /**
+   * Note: the writer will be closed at the end of the method
+   * @param resultSetWriter
+   */
   def sendResultSet(resultSetWriter: ResultSetWriter[_ <: MetaData, _ <: Record]): Unit = {
     logger.info("Start to send res to entrance")
     val fileName = new
File(resultSetWriter.toFSPath.getPath).getName diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala index 981d12d9122..608c2da2a25 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala @@ -50,12 +50,12 @@ import org.apache.linkis.engineconn.executor.entity.ResourceFetchExecutor import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext import org.apache.linkis.engineconn.executor.listener.event.EngineConnSyncEvent import org.apache.linkis.governance.common.entity.ExecutionNodeStatus -import org.apache.linkis.governance.common.protocol.task._ -import org.apache.linkis.governance.common.utils.JobUtils -import org.apache.linkis.governance.exception.engineconn.{ +import org.apache.linkis.governance.common.exception.engineconn.{ EngineConnExecutorErrorCode, EngineConnExecutorErrorException } +import org.apache.linkis.governance.common.protocol.task._ +import org.apache.linkis.governance.common.utils.JobUtils import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.linkis.manager.common.protocol.resource.{ ResponseTaskRunningInfo, diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccess.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccess.scala index 981f389c196..cfa3c94a678 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccess.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccess.scala @@ -105,7 +105,7 @@ class ECTaskEntranceInfoAccess extends ConnectionInfoAccess with Logging { "invalid data-type: " + request.getClass.getCanonicalName ) } - JavaConverters.asScalaIteratorConverter(ret.iterator()).asScala.toList + ret.iterator().asScala.toList } private def getDWCServiceInstance(serviceInstance: SpringCloudServiceInstance): ServiceInstance = diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala index d72ec939d60..88f9112b32e 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala @@ -35,11 +35,11 @@ import org.apache.linkis.engineconn.core.execution.{ import org.apache.linkis.engineconn.core.hook.ShutdownHook import org.apache.linkis.engineconn.core.util.EngineConnUtils import org.apache.linkis.governance.common.conf.GovernanceCommonConf -import org.apache.linkis.governance.common.utils.EngineConnArgumentsParser -import org.apache.linkis.governance.exception.engineconn.{ +import org.apache.linkis.governance.common.exception.engineconn.{ EngineConnExecutorErrorCode, EngineConnExecutorErrorException } +import org.apache.linkis.governance.common.utils.EngineConnArgumentsParser import org.apache.linkis.manager.engineplugin.common.launch.process.Environment import org.apache.linkis.manager.label.builder.factory.{ LabelBuilderFactory, diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/pom.xml index c7c26328493..71704146923 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/pom.xml +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/pom.xml @@ -21,7 +21,7 @@ org.apache.linkis linkis 1.3.0 - ../../../pom.xml + ../../../../pom.xml linkis-accessible-executor diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala index 41232eee479..3e414baed26 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala @@ -59,7 +59,6 @@ object LogHelper extends Logging { } if (logs != null && logs.size > 0) { val sb: StringBuilder = new StringBuilder - import scala.collection.JavaConverters._ logs.asScala map (log => log + "\n") foreach sb.append logListener.onLogUpdate(TaskLogUpdateEvent(null, sb.toString)) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala index 8823053fd03..4d85fab4895 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala @@ -27,7 +27,7 @@ import org.apache.linkis.engineconn.acessible.executor.listener.event.{ import 
org.apache.linkis.engineconn.acessible.executor.lock.EngineConnTimedLock import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext -import org.apache.linkis.governance.exception.engineconn.{ +import org.apache.linkis.governance.common.exception.engineconn.{ EngineConnExecutorErrorCode, EngineConnExecutorErrorException } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/pom.xml index 6d801bf8a04..6dc215c4cf1 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/pom.xml +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../../../pom.xml linkis-executor-core diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/pom.xml index 0d7f108415b..d22c4368752 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/pom.xml +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/pom.xml @@ -21,7 +21,7 @@ org.apache.linkis linkis 1.3.0 - + ../../../pom.xml linkis-engineconn-plugin-core diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala index e4150418721..290c6211e1b 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala @@ -19,6 +19,8 @@ package org.apache.linkis.manager.engineplugin.common.conf import org.apache.linkis.common.conf.{ByteType, CommonVars, Configuration} +import org.apache.commons.lang3.{JavaVersion, SystemUtils} + object EnvConfiguration { val HIVE_CONF_DIR = CommonVars[String]( @@ -39,9 +41,15 @@ object EnvConfiguration { val ENGINE_CONN_CLASSPATH_FILES = CommonVars("wds.linkis.engineConn.files", "", "engineConn额外的配置文件") + val metaspaceSize = if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) { + "-XX:MaxMetaspaceSize=256m -XX:MetaspaceSize=128m" + } else { + "-XX:MaxPermSize=256m -XX:PermSize=128m" + } + val ENGINE_CONN_DEFAULT_JAVA_OPTS = CommonVars[String]( "wds.linkis.engineConn.javaOpts.default", s"-XX:+UseG1GC ${metaspaceSize} " + s"-Xloggc:%s -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Dwds.linkis.server.conf=linkis-engineconn.properties -Dwds.linkis.gateway.url=${Configuration.getGateWayURL()}" )
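The EnvConfiguration hunk above is the substantive change here: JDK 8 removed the permanent generation, so -XX:MaxPermSize/-XX:PermSize are ignored (with a warning) on Java 8 and later, and the default EngineConn options now switch to Metaspace flags whenever the detected runtime is at least Java 1.8. A self-contained sketch of the same check, using the commons-lang3 helpers the hunk itself imports (the printed option string is illustrative):

import org.apache.commons.lang3.{JavaVersion, SystemUtils}

object MetaspaceOptsSketch extends App {
  // On Java 8+ the class-metadata area is Metaspace; the PermGen flags in the
  // else-branch are only meaningful on Java 7 and earlier.
  val metaspaceSize =
    if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) {
      "-XX:MaxMetaspaceSize=256m -XX:MetaspaceSize=128m"
    } else {
      "-XX:MaxPermSize=256m -XX:PermSize=128m"
    }
  println(s"-XX:+UseG1GC $metaspaceSize")
}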
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-server/src/main/assembly/distribution.xml b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-server/src/main/assembly/distribution.xml index 41b646a8585..58cf6336853 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-server/src/main/assembly/distribution.xml +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-server/src/main/assembly/distribution.xml @@ -15,15 +15,12 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - + + linkis-application-manager dir - zip false linkis-engineconn-plugin-server diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-server/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminService.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-server/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java similarity index 90% rename from linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-server/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminService.java rename to linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-server/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java index bf636985777..a5656d4396e 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-server/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminService.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-server/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java @@ -24,8 +24,9 @@ import org.apache.linkis.common.utils.ZipUtils; import org.apache.linkis.engineplugin.server.dao.EngineConnBmlResourceDao; import org.apache.linkis.engineplugin.server.entity.EngineConnBmlResource; -import org.apache.linkis.engineplugin.server.localize.AbstractEngineConnBmlResourceGenerator; +import org.apache.linkis.engineplugin.server.localize.DefaultEngineConnBmlResourceGenerator; import org.apache.linkis.engineplugin.server.restful.EnginePluginRestful; +import org.apache.linkis.engineplugin.server.service.EnginePluginAdminService; import org.apache.linkis.engineplugin.vo.EnginePluginBMLVo; import org.springframework.beans.factory.annotation.Autowired; @@ -44,12 +45,12 @@ import org.slf4j.LoggerFactory; @Service -public class EnginePluginAdminService - implements org.apache.linkis.engineplugin.server.service.EnginePluginAdminService { +public class EnginePluginAdminServiceImpl implements EnginePluginAdminService { private static final Logger log = LoggerFactory.getLogger(EnginePluginRestful.class); @Autowired private EngineConnBmlResourceDao engineConnBmlResourceDao; - @Autowired private AbstractEngineConnBmlResourceGenerator abstractEngineConnBmlResourceGenerator; + private DefaultEngineConnBmlResourceGenerator defaultEngineConnBmlResourceGenerator = + new DefaultEngineConnBmlResourceGenerator(); private BmlClient bmlClient = BmlClientFactory.createBmlClient(); @@ -87,7 +88,7 @@ public void deleteEnginePluginBML(String ecType, String version, String username // bmlClient.deleteResource(username,engineConnBmlResource.getBmlResourceId()); engineConnBmlResourceDao.delete(engineConnBmlResource); }); - String engineConnsHome = abstractEngineConnBmlResourceGenerator.getEngineConnsHome(); + String engineConnsHome = defaultEngineConnBmlResourceGenerator.getEngineConnsHome(); File file = new File(engineConnsHome + "/" + ecType); if (file.exists()) { deleteDir(file); @@ -113,7 +114,7 @@
public PageInfo queryDataSourceInfoPage( @Override public void uploadToECHome(MultipartFile mfile) { - String engineConnsHome = abstractEngineConnBmlResourceGenerator.getEngineConnsHome(); + String engineConnsHome = defaultEngineConnBmlResourceGenerator.getEngineConnsHome(); try { InputStream in = mfile.getInputStream(); byte[] buffer = new byte[1024]; diff --git a/linkis-computation-governance/linkis-engineconn/pom.xml b/linkis-computation-governance/linkis-engineconn/pom.xml index a549def22f7..4c54a2157d3 100644 --- a/linkis-computation-governance/linkis-engineconn/pom.xml +++ b/linkis-computation-governance/linkis-engineconn/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../pom.xml linkis-engineconn diff --git a/linkis-computation-governance/linkis-entrance/pom.xml b/linkis-computation-governance/linkis-entrance/pom.xml index 60afddd9c2d..a29399a5a08 100644 --- a/linkis-computation-governance/linkis-entrance/pom.xml +++ b/linkis-computation-governance/linkis-entrance/pom.xml @@ -22,6 +22,7 @@ org.apache.linkis linkis 1.3.0 + ../../pom.xml linkis-entrance diff --git a/linkis-computation-governance/linkis-entrance/src/main/assembly/distribution.xml b/linkis-computation-governance/linkis-entrance/src/main/assembly/distribution.xml index d3513355fad..313506fd085 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/assembly/distribution.xml +++ b/linkis-computation-governance/linkis-entrance/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - + + linkis-ujes-spark-entrance dir diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java new file mode 100644 index 00000000000..05f3041a9d9 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java @@ -0,0 +1,162 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.errorcode; + +public enum EntranceErrorCodeSummary { + UNSUPPORTED_OPERATION(10000, "Unsupported operation(不支持的操作)", "Unsupported operation(不支持的操作)"), + JOBREQ_NOT_NULL(20001, "JobReq can't be null(JobReq不能为空)", "JobReq can't be null(JobReq不能为空)"), + LABEL_NOT_NULL( + 20001, + "The label of userCreator or engineType cannot be null(标签 userCreator 或 engineType 不能为空)", + "The label of userCreator or engineType cannot be null(标签 userCreator 或 engineType 不能为空)"), + NOT_CREATE_EXECUTOR( + 20001, + "Task is not requestPersistTask, cannot create Executor(Task不是requestPersistTask,不能创建Executor)", + "Task is not requestPersistTask, cannot create Executor(Task不是requestPersistTask,不能创建Executor)"), + ENTRA_NOT_CREATE_EXECUTOR( + 20001, + "Task is not EntranceJob, cannot create Executor(Task 不是 EntranceJob,不能创建 Executor)", + "Task is not EntranceJob, cannot create Executor(Task 不是 EntranceJob,不能创建 Executor)"), + + JOB_NOT_NULL(20002, "job can't be null(job不能为空)", "job can't be null(job不能为空)"), + JOBREQUEST_NOT_NULL( + 20004, + "JobRequest cannot be null, unable to do persist operation(JobRequest 不能为空,无法进行持久化操作)", + "JobRequest cannot be null, unable to do persist operation(JobRequest 不能为空,无法进行持久化操作)"), + INSTANCE_NOT_NULL( + 20004, "The instance can't be null(实例不能为空)", "The instance can't be null(实例不能为空)"), + EXECUTEUSER_NOT_NULL( + 20005, + "The execute user can't be null(执行用户不能为空)", + "The execute user can't be null(执行用户不能为空)"), + + PARAM_NOT_NULL( + 20007, + "The param executionCode can not be empty (参数 executionCode 不能为空)", + "The param executionCode can not be empty (参数 executionCode 不能为空)"), + EXEC_SCRIPT_NOT_NULL( + 20007, + "The param executionCode and scriptPath can not be empty at the same time(参数 executionCode 和 scriptPath 不能同时为空)", + "The param executionCode and scriptPath can not be empty at the same time(参数 executionCode 和 scriptPath 不能同时为空)"), + + ONLY_CODE_SUPPORTED( + 20010, + "Only code with runtype supported (仅支持运行类型的代码)", + "Only code with runtype supported (仅支持运行类型的代码)"), + REQUEST_JOBHISTORY_FAILED( + 20011, + "Request jobHistory failed, reason (请求jobHistory失败,原因):", + "Request jobHistory failed, reason (请求jobHistory失败,原因):"), + JOBRESP_PROTOCOL_NULL( + 20011, + "Request jobHistory failed, reason: jobRespProtocol is null (请求jobHistory失败,原因:jobRespProtocol为null)", + "Request jobHistory failed, reason: jobRespProtocol is null (请求jobHistory失败,原因:jobRespProtocol为null)"), + READ_TASKS_FAILED( + 20011, + "Read all tasks failed, reason (读取所有任务失败,原因):", + "Read all tasks failed, reason (读取所有任务失败,原因):"), + + SENDER_RPC_FAILED(20020, "Sender rpc failed(发件人 RPC 失败)", "Sender rpc failed(发件人 RPC 失败)"), + + FAILED_ANALYSIS_TASK( + 20039, + "Failed to analyze task! The reason is(分析任务失败!原因是):", + "Failed to analyze task! 
The reason is(分析任务失败!原因是):"), + + INVALID_ENGINETYPE_NULL( + 20052, + "Invalid engineType null, cannot use cache(无效的engineType null,不能使用缓存)", + "Invalid engineType null, cannot use cache(无效的engineType null,不能使用缓存)"), + PERSIST_JOBREQUEST_ERROR( + 20052, + "Persist jobRequest error, please submit again later(存储Job异常,请稍后重新提交任务)", + "Persist jobRequest error, please submit again later(存储Job异常,请稍后重新提交任务)"), + + INVALID_RESULTSETS( + 20053, + "Invalid resultsets, cannot use cache(结果集无效,无法使用缓存)", + "Invalid resultsets, cannot use cache(结果集无效,无法使用缓存)"), + SUBMITTING_QUERY_FAILED( + 30009, "Submitting the query failed!(提交查询失败!)", "Submitting the query failed!(提交查询失败!)"), + QUERY_STATUS_FAILED( + 50081, + "Query from jobHistory status failed(从 jobHistory 状态查询失败)", + "Query from jobHistory status failed(从 jobHistory 状态查询失败)"), + GET_QUERY_RESPONSE( + 50081, + "Get query response incorrectly(错误地获取查询响应)", + "Get query response incorrectly(错误地获取查询响应)"), + QUERY_TASKID_ERROR( + 50081, + "Query taskId error, taskId(查询 taskId 错误,taskId):", + "Query taskId error, taskId(查询 taskId 错误,taskId):"), + CORRECT_LIST_TYPE( + 50081, + "Query from jobhistory with incorrect list type of taskId, the taskId is ( 从jobhistory 中查询的参数类型不正确,taskId为):", + "Query from jobhistory with incorrect list type of taskId, the taskId is ( 从jobhistory 中查询的参数类型不正确,taskId为):"), + SHELL_BLACKLISTED_CODE( + 50081, + "Shell code contains blacklisted code(shell中包含黑名单代码)", + "Shell code contains blacklisted code(shell中包含黑名单代码)"), + JOB_HISTORY_FAILED_ID(50081, "", ""), + + LOGPATH_NOT_NULL( + 20301, "The logPath cannot be empty(日志路径不能为空)", "The logPath cannot be empty(日志路径不能为空)"); + + /** (errorCode)错误码 */ + private int errorCode; + /** (errorDesc)错误描述 */ + private String errorDesc; + /** Possible reasons for the error(错误可能出现的原因) */ + private String comment; + + EntranceErrorCodeSummary(int errorCode, String errorDesc, String comment) { + this.errorCode = errorCode; + this.errorDesc = errorDesc; + this.comment = comment; + } + + public int getErrorCode() { + return errorCode; + } + + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + @Override + public String toString() { + return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc; + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/AbstractEntranceParser.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/AbstractEntranceParser.java index 5c789e06e84..aa337a0c5e2 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/AbstractEntranceParser.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/AbstractEntranceParser.java @@ -45,6 +45,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary.JOBREQ_NOT_NULL; +import static org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary.JOB_NOT_NULL; + public abstract class AbstractEntranceParser extends EntranceParser { private EntranceContext entranceContext; @@ -80,7 +83,8 @@ protected PersistenceManager getPersistenceManager() { @Override public
JobRequest parseToJobRequest(Job job) throws EntranceIllegalParamException { if (job == null) { - throw new EntranceIllegalParamException(20002, "job can't be null"); + throw new EntranceIllegalParamException( + JOB_NOT_NULL.getErrorCode(), JOB_NOT_NULL.getErrorDesc()); } JobRequest jobRequest = ((EntranceJob) job).getJobRequest(); if (StringUtils.isEmpty(jobRequest.getReqId())) { @@ -109,7 +113,8 @@ public JobRequest parseToJobRequest(Job job) throws EntranceIllegalParamExceptio @Override public Job parseToJob(JobRequest jobReq) throws EntranceIllegalParamException { if (jobReq == null) { - throw new EntranceIllegalParamException(20001, "JobReq can't be null"); + throw new EntranceIllegalParamException( + JOBREQ_NOT_NULL.getErrorCode(), JOBREQ_NOT_NULL.getErrorDesc()); } EntranceJob job = createEntranceJob(); job.setId(String.valueOf(jobReq.getId())); diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceEngine.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceEngine.java index 2705f4f31d5..188c2ab0d94 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceEngine.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceEngine.java @@ -48,6 +48,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary.*; + public class QueryPersistenceEngine extends AbstractPersistenceEngine { private Sender sender; @@ -104,11 +106,13 @@ private JobRespProtocol sendToJobHistoryAndRetry(RequestProtocol jobReq, String int status = jobRespProtocol.getStatus(); String message = jobRespProtocol.getMsg(); if (status != 0) { - throw new QueryFailedException(20011, "Request jobHistory failed, reason: " + message); + throw new QueryFailedException( + REQUEST_JOBHISTORY_FAILED.getErrorCode(), + REQUEST_JOBHISTORY_FAILED.getErrorDesc() + message); } } else { throw new QueryFailedException( - 20011, "Request jobHistory failed, reason: jobRespProtocol is null "); + JOBRESP_PROTOCOL_NULL.getErrorCode(), JOBRESP_PROTOCOL_NULL.getErrorDesc()); } return jobRespProtocol; } @@ -116,7 +120,8 @@ private JobRespProtocol sendToJobHistoryAndRetry(RequestProtocol jobReq, String @Override public void updateIfNeeded(JobRequest jobReq) throws ErrorException, QueryFailedException { if (null == jobReq) { - throw new EntranceIllegalParamException(20004, "JobReq cannot be null."); + throw new EntranceIllegalParamException( + JOBREQ_NOT_NULL.getErrorCode(), JOBREQ_NOT_NULL.getErrorDesc()); } JobRequest jobReqForUpdate = new JobRequest(); BeanUtils.copyProperties(jobReq, jobReqForUpdate); @@ -138,7 +143,7 @@ public void updateIfNeeded(JobRequest jobReq) throws ErrorException, QueryFailed public void persist(JobRequest jobReq) throws ErrorException { if (null == jobReq) { throw new EntranceIllegalParamException( - 20004, "JobRequest cannot be null, unable to do persist operation"); + JOBREQUEST_NOT_NULL.getErrorCode(), JOBREQUEST_NOT_NULL.getErrorDesc()); } JobReqInsert jobReqInsert = new JobReqInsert(jobReq); JobRespProtocol jobRespProtocol = sendToJobHistoryAndRetry(jobReqInsert, "Insert job"); @@ -162,7 +167,8 @@ public Task[] readAll(String instance) List retList = new ArrayList<>(); if (instance == null || "".equals(instance)) { - throw new 
EntranceIllegalParamException(20004, "instance can not be null"); + throw new EntranceIllegalParamException( + INSTANCE_NOT_NULL.getErrorCode(), INSTANCE_NOT_NULL.getErrorDesc()); } RequestReadAllTask requestReadAllTask = new RequestReadAllTask(instance); @@ -170,13 +176,15 @@ public Task[] readAll(String instance) try { responsePersist = (ResponsePersist) sender.ask(requestReadAllTask); } catch (Exception e) { - throw new EntranceRPCException(20020, "sender rpc failed ", e); + throw new EntranceRPCException( + SENDER_RPC_FAILED.getErrorCode(), SENDER_RPC_FAILED.getErrorDesc(), e); } if (responsePersist != null) { int status = responsePersist.getStatus(); String message = responsePersist.getMsg(); if (status != 0) { - throw new QueryFailedException(20011, "read all tasks failed, reason: " + message); + throw new QueryFailedException( + READ_TASKS_FAILED.getErrorCode(), READ_TASKS_FAILED.getErrorDesc() + message); } Map data = responsePersist.getData(); Object object = data.get(TaskConstant.TASK); diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala index 6a43cb6a113..1aa2097d690 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala @@ -20,6 +20,7 @@ package org.apache.linkis.entrance import org.apache.linkis.common.exception.{ErrorException, LinkisException, LinkisRuntimeException} import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.cs.CSEntranceHelper +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.{EntranceErrorException, SubmitFailedException} import org.apache.linkis.entrance.execute.EntranceJob import org.apache.linkis.entrance.log.LogReader @@ -69,8 +70,8 @@ abstract class EntranceServer extends Logging { .persist(jobRequest) if (null == jobRequest.getId || jobRequest.getId <= 0) { throw new EntranceErrorException( - 20052, - "Persist jobRequest error, please submit again later(存储Job异常,请稍后重新提交任务)" + PERSIST_JOBREQUEST_ERROR.getErrorCode, + PERSIST_JOBREQUEST_ERROR.getErrorDesc ) } logger.info(s"received a request,convert $jobRequest") @@ -85,19 +86,15 @@ abstract class EntranceServer extends Logging { case error: ErrorException => error case t1: Throwable => val exception = new EntranceErrorException( - 20039, - "failed to analysis task ! the reason is :" + ExceptionUtils.getRootCauseMessage( - t - ) + "(解析task失败!原因:" + ExceptionUtils.getRootCauseMessage(t) + FAILED_ANALYSIS_TASK.getErrorCode, + FAILED_ANALYSIS_TASK.getErrorDesc + ExceptionUtils.getRootCauseMessage(t) ) exception.initCause(t1) exception case _ => new EntranceErrorException( - 20039, - "failed to analysis task ! 
the reason is :" + ExceptionUtils.getRootCauseMessage( - t - ) + "(解析task失败!原因:" + ExceptionUtils.getRootCauseMessage(t) + FAILED_ANALYSIS_TASK.getErrorCode, + FAILED_ANALYSIS_TASK.getErrorDesc + ExceptionUtils.getRootCauseMessage(t) ) } jobRequest match { @@ -171,8 +168,8 @@ abstract class EntranceServer extends Logging { case e: LinkisRuntimeException => e case t: Throwable => new SubmitFailedException( - 30009, - "Submitting the query failed!(提交查询失败!)" + ExceptionUtils.getRootCauseMessage(t), + SUBMITTING_QUERY_FAILED.getErrorCode, + SUBMITTING_QUERY_FAILED.getErrorDesc + ExceptionUtils.getRootCauseMessage(t), t ) } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/exception/JobHistoryFailedException.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/exception/JobHistoryFailedException.scala index c4b4b2a4720..6794a39b56f 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/exception/JobHistoryFailedException.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/exception/JobHistoryFailedException.scala @@ -18,5 +18,7 @@ package org.apache.linkis.entrance.exception import org.apache.linkis.common.exception.ErrorException +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ -case class JobHistoryFailedException(errorMsg: String) extends ErrorException(50081, errorMsg) +case class JobHistoryFailedException(errorMsg: String) + extends ErrorException(JOB_HISTORY_FAILED_ID.getErrorCode, errorMsg) diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/exception/SensitiveTablesCheckException.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/exception/SensitiveTablesCheckException.scala index 90ae00d3a25..44e7e47cf9c 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/exception/SensitiveTablesCheckException.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/exception/SensitiveTablesCheckException.scala @@ -18,7 +18,9 @@ package org.apache.linkis.entrance.exception import org.apache.linkis.common.exception.ErrorException +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ case class SensitiveTablesCheckException(errorMsg: String) extends ErrorException(50079, errorMsg) -case class DangerousGramsCheckException(errorMsg: String) extends ErrorException(50081, errorMsg) +case class DangerousGramsCheckException(errorMsg: String) + extends ErrorException(JOB_HISTORY_FAILED_ID.getErrorCode, errorMsg) diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala index d22a67f70ff..49fc59ebb23 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala @@ -54,11 +54,8 @@ import org.apache.commons.lang3.exception.ExceptionUtils import java.util import java.util.Date -class DefaultEntranceExecutor( - id: Long, - mark: MarkReq, - entranceExecutorManager: EntranceExecutorManager -) extends 
EntranceExecutor(id, mark) +class DefaultEntranceExecutor(id: Long) + extends EntranceExecutor(id) with SingleTaskOperateSupport with Logging { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala index 2b80d731a25..44cb3620ced 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala @@ -34,7 +34,7 @@ import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils import org.apache.commons.lang3.exception.ExceptionUtils -abstract class EntranceExecutor(val id: Long, val mark: MarkReq) extends Executor with Logging { +abstract class EntranceExecutor(val id: Long) extends Executor with Logging { private implicit var userWithCreator: UserWithCreator = _ diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala index 43dd275725e..ff79a12c65c 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala @@ -20,6 +20,7 @@ package org.apache.linkis.entrance.execute import org.apache.linkis.common.exception.WarnException import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.EntranceErrorException import org.apache.linkis.entrance.job.EntranceExecutionJob import org.apache.linkis.governance.common.entity.job.JobRequest @@ -46,16 +47,6 @@ abstract class EntranceExecutorManager(groupFactory: GroupFactory) } } - protected def createMarkReq(jobReq: JobRequest): MarkReq = { - val markReq = new MarkReq - markReq.setCreateService(EntranceConfiguration.DEFAULT_CREATE_SERVICE.getValue) - // todo get default config from db - markReq.setProperties(jobReq.getParams) - markReq.setUser(jobReq.getExecuteUser) - markReq.setLabels(LabelUtils.labelsToMap(jobReq.getLabels)) - markReq - } - override def askExecutor(schedulerEvent: SchedulerEvent): Option[Executor] = schedulerEvent match { case job: Job => @@ -109,11 +100,8 @@ abstract class EntranceExecutorManager(groupFactory: GroupFactory) case job: EntranceJob => job.getJobRequest match { case jobRequest: JobRequest => - // CreateMarkReq - val markReq = createMarkReq(jobRequest) - // getMark val entranceEntranceExecutor = - new DefaultEntranceExecutor(idGenerator.incrementAndGet(), markReq, this) + new DefaultEntranceExecutor(idGenerator.incrementAndGet()) // getEngineConn Executor job.getLogListener.foreach( _.onLogUpdate(job, "Your job is being scheduled by orchestrator.") @@ -128,14 +116,14 @@ abstract class EntranceExecutorManager(groupFactory: GroupFactory) entranceEntranceExecutor case _ => throw new EntranceErrorException( - 20001, - "Task is not requestPersistTask, cannot to create Executor" + NOT_CREATE_EXECUTOR.getErrorCode, + NOT_CREATE_EXECUTOR.getErrorDesc ) 
} case _ => throw new EntranceErrorException( - 20001, - "Task is not EntranceJob, cannot to create Executor" + ENTRA_NOT_CREATE_EXECUTOR.getErrorCode, + ENTRA_NOT_CREATE_EXECUTOR.getErrorDesc ) } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala index 5e9088f7a5c..88b035416d2 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala @@ -21,6 +21,7 @@ import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.Utils import org.apache.linkis.entrance.EntranceContext import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.event._ import org.apache.linkis.entrance.exception.EntranceErrorException import org.apache.linkis.governance.common.entity.job.JobRequest @@ -284,7 +285,11 @@ abstract class EntranceJob extends Job { this.getExecutor match { case entranceExecutor: EntranceExecutor => operate(entranceExecutor) - case _ => throw new EntranceErrorException(10000, "Unsupported operation") + case _ => + throw new EntranceErrorException( + UNSUPPORTED_OPERATION.getErrorCode, + UNSUPPORTED_OPERATION.getErrorDesc + ) } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/ExecuteRequestInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/ExecuteRequestInterceptor.scala index 151d319cc27..82305572984 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/ExecuteRequestInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/ExecuteRequestInterceptor.scala @@ -96,7 +96,7 @@ object RuntimePropertiesExecuteRequestInterceptor extends ExecuteRequestIntercep override def apply(requestTask: RequestTask, executeRequest: ExecuteRequest): RequestTask = executeRequest match { case runtime: RuntimePropertiesExecuteRequest => - mapAsScalaMapConverter(runtime.properties).asScala.foreach { case (k, v) => + runtime.properties.asScala.foreach { case (k, v) => requestTask.data(k, v) } requestTask diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/MarkReq.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/MarkReq.scala deleted file mode 100644 index d04511f29ea..00000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/MarkReq.scala +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.entrance.execute - -import java.util - -import scala.beans.BeanProperty -import scala.collection.JavaConverters._ - -class MarkReq { - - /** - * 只包含StartUp参数和RunTime参数 - */ - @BeanProperty - var properties: util.Map[String, Object] = null - - /** - * 启动engineConn必要Label - */ - @BeanProperty - var labels: util.Map[String, AnyRef] = null - - /** - * executeUser - */ - @BeanProperty - var user: String = null - - /** - * 启动的服务:如linkis-entrance - */ - @BeanProperty - var createService: String = null - - @BeanProperty - var description: String = null - - override def equals(obj: Any): Boolean = { - var flag = false - if (null != obj && obj.isInstanceOf[MarkReq]) { - val other = obj.asInstanceOf[MarkReq] - - if (other.getUser != getUser) { - return flag - } - - if (other.getLabels != null && getLabels != null) { - if (getLabels.size() != other.getLabels.size()) { - return false - } - val iterator = other.getLabels.asScala.iterator - while (iterator.hasNext) { - val next = iterator.next() - if (null == next._2 || !next._2.equals(getLabels.get(next._1))) { - return false - } - } - } - flag = true - } - flag - } - - override def hashCode(): Int = super.hashCode() -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/ShellDangerousGrammerInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/ShellDangerousGrammerInterceptor.scala index cabbe84b693..c2b66803953 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/ShellDangerousGrammerInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/ShellDangerousGrammerInterceptor.scala @@ -19,6 +19,7 @@ package org.apache.linkis.entrance.interceptor.impl import org.apache.linkis.common.utils.Logging import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.DangerousGramsCheckException import org.apache.linkis.entrance.interceptor.EntranceInterceptor import org.apache.linkis.governance.common.entity.job.JobRequest @@ -106,7 +107,7 @@ class ShellDangerousGrammerInterceptor extends EntranceInterceptor with Logging ) { logger.info(s"GET REQUEST CODE_TYPE ${codeType} and ENGINE_TYPE ${EngineType}") if (shellContainDangerUsage(jobRequest.getExecutionCode)) { - throw DangerousGramsCheckException("Shell code contains blacklisted code(shell中包含黑名单代码)") + throw DangerousGramsCheckException(SHELL_BLACKLISTED_CODE.getErrorDesc) } jobRequest } else { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala index c0cb7ced381..c3e04b66581 100644 --- 
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala @@ -20,6 +20,7 @@ package org.apache.linkis.entrance.log import org.apache.linkis.common.io.FsPath import org.apache.linkis.common.utils.Utils import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.EntranceErrorException import org.apache.linkis.storage.FSFactory import org.apache.linkis.storage.fs.FileSystem @@ -37,7 +38,7 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u extends LogWriter(charset) { if (StringUtils.isBlank(logPath)) - throw new EntranceErrorException(20301, "logPath cannot be empty.") + throw new EntranceErrorException(LOGPATH_NOT_NULL.getErrorCode, LOGPATH_NOT_NULL.getErrorDesc) protected var fileSystem = FSFactory.getFsByProxyUser(new FsPath(logPath), user).asInstanceOf[FileSystem] diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogReader.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogReader.scala index faee75936f7..1d5f0cbda9f 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogReader.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogReader.scala @@ -48,7 +48,7 @@ abstract class LogReader(charset: String) extends Closeable with Logging { */ def readArray(logs: Array[String], fromLine: Int, size: Int = 100): Int = { if (logs.length != 4) { - throw new LogReadFailedException(" logs的长度必须为4!") + throw new LogReadFailedException(" The length of the logs array must be 4(logs的长度必须为4)") } val error = new StringBuilder val warning = new StringBuilder diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogWriter.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogWriter.scala index f730abe5093..2850c205395 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogWriter.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogWriter.scala @@ -19,6 +19,7 @@ package org.apache.linkis.entrance.log import org.apache.linkis.common.io.FsPath import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary.LOGPATH_NOT_NULL import org.apache.linkis.entrance.exception.EntranceErrorException import org.apache.linkis.storage.FSFactory import org.apache.linkis.storage.fs.FileSystem @@ -75,7 +76,7 @@ abstract class AbstractLogWriter(logPath: String, user: String, charset: String) extends LogWriter(charset) { if (StringUtils.isBlank(logPath)) { - throw new EntranceErrorException(20301, "logPath cannot be empty.") + throw new EntranceErrorException(LOGPATH_NOT_NULL.getErrorCode, LOGPATH_NOT_NULL.getErrorDesc) } protected var fileSystem = diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala index 46f0fd71661..f1c7378c3e1 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala @@ -56,7 +56,7 @@ class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlu .newBuilder() .maximumSize(1000) .expireAfterAccess(1, TimeUnit.HOURS) - .refreshAfterWrite(EntranceConfiguration.USER_PARALLEL_REFLESH_TIME.getValue, TimeUnit.MINUTES) + .expireAfterWrite(EntranceConfiguration.USER_PARALLEL_REFLESH_TIME.getValue, TimeUnit.MINUTES) .build(new CacheLoader[String, Integer]() { override def load(key: String): Integer = {
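The one-line cache change above is behavioural, not cosmetic: Guava's refreshAfterWrite keeps serving the stale entry while a reload runs in the background, whereas expireAfterWrite evicts the entry so the next get() blocks and reloads, guaranteeing the per-user parallelism limit is never read older than the configured window. A minimal sketch of the resulting wiring, assuming only Guava on the classpath; the 5-minute window and the length-based loader stand in for USER_PARALLEL_REFLESH_TIME and the real RPC lookup:

import java.util.concurrent.TimeUnit

import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}

object CacheExpirySketch {
  val limits: LoadingCache[String, Integer] = CacheBuilder
    .newBuilder()
    .maximumSize(1000)
    // expireAfterWrite: after 5 minutes the entry is gone, and the next get()
    // blocks on load(), so callers never observe a value older than the window.
    .expireAfterWrite(5, TimeUnit.MINUTES)
    .build(new CacheLoader[String, Integer]() {
      override def load(key: String): Integer = Integer.valueOf(key.length) // stand-in load
    })

  def limitFor(user: String): Int = limits.get(user)
}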
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala index c145f23198a..e2881a9c70a 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala @@ -19,6 +19,7 @@ package org.apache.linkis.entrance.parser import org.apache.linkis.common.utils.Logging import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceIllegalParamException} import org.apache.linkis.entrance.persistence.PersistenceManager import org.apache.linkis.entrance.timeout.JobTimeoutManager @@ -94,12 +95,18 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) if (executionContent.containsKey(TaskConstant.CODE)) { code = executionContent.get(TaskConstant.CODE).asInstanceOf[String] runType = executionContent.get(TaskConstant.RUNTYPE).asInstanceOf[String] - if (StringUtils.isEmpty(code)) { - throw new EntranceIllegalParamException(20007, "param executionCode can not be empty ") + if (StringUtils.isBlank(code)) { + throw new EntranceIllegalParamException( + PARAM_NOT_NULL.getErrorCode, + PARAM_NOT_NULL.getErrorDesc + ) } } else { // todo check - throw new EntranceIllegalParamException(20010, "Only code with runtype supported !") + throw new EntranceIllegalParamException( + ONLY_CODE_SUPPORTED.getErrorCode, + ONLY_CODE_SUPPORTED.getErrorDesc + ) } val formatCode = params.get(TaskConstant.FORMATCODE).asInstanceOf[Boolean] if (formatCode) code = format(code) @@ -125,7 +132,7 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) } private def checkEngineTypeLabel(labels: util.Map[String, Label[_]]): Unit = { - val engineTypeLabel = labels.asScala.getOrElse(LabelKeyConstant.ENGINE_TYPE_KEY, null) + val engineTypeLabel = labels.getOrDefault(LabelKeyConstant.ENGINE_TYPE_KEY, null) if (null == engineTypeLabel) { val msg = s"You need to specify engineTypeLabel in labels, such as spark-2.4.3" throw new EntranceIllegalParamException( @@ -145,7 +152,7 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) runType: String, labels: util.Map[String, Label[_]] ): Unit = { - val engineRunTypeLabel = labels.asScala.getOrElse(LabelKeyConstant.CODE_TYPE_KEY, null) + val engineRunTypeLabel = labels.getOrDefault(LabelKeyConstant.CODE_TYPE_KEY, null) if (StringUtils.isBlank(runType) && null == engineRunTypeLabel) { val msg = s"You need to specify runType in execution content, such as sql" logger.warn(msg) @@ -171,8 +178,8 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) executeUser: String, labels: util.Map[String, Label[_]] ): Unit = { - var userCreatorLabel = labels.asScala - .getOrElse(LabelKeyConstant.USER_CREATOR_TYPE_KEY, null) + var userCreatorLabel = labels + .getOrDefault(LabelKeyConstant.USER_CREATOR_TYPE_KEY, null) .asInstanceOf[UserCreatorLabel] if (null == userCreatorLabel) { userCreatorLabel = labelBuilderFactory.createLabel(classOf[UserCreatorLabel]) @@ -194,7 +201,10 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) jobReq.setSubmitUser(umUser) } if (umUser == null) { - throw new EntranceIllegalParamException(20005, "execute user can not be null") + throw new EntranceIllegalParamException( + EXECUTEUSER_NOT_NULL.getErrorCode, + EXECUTEUSER_NOT_NULL.getErrorDesc + ) } jobReq.setExecuteUser(umUser) var executionCode = params.get(TaskConstant.EXECUTIONCODE).asInstanceOf[String] @@ -210,7 +220,7 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) .asInstanceOf[util.Map[String, String]] val executeApplicationName = params.get(TaskConstant.EXECUTEAPPLICATIONNAME).asInstanceOf[String] - if (StringUtils.isEmpty(creator)) { + if (StringUtils.isBlank(creator)) { creator = EntranceConfiguration.DEFAULT_REQUEST_APPLICATION_NAME.getValue } // When the execution type is IDE, executioncode and scriptpath cannot be empty at the same time if ( EntranceConfiguration.DEFAULT_REQUEST_APPLICATION_NAME.getValue.equals(creator) && StringUtils.isEmpty(executionCode) ) { throw new EntranceIllegalParamException( - 20007, - "param executionCode and scriptPath can not be empty at the same time" + EXEC_SCRIPT_NOT_NULL.getErrorCode, + EXEC_SCRIPT_NOT_NULL.getErrorDesc ) } var runType: String = null diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala index 1ffb3ffa28f..650ef230b89 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala @@ -20,6 +20,7 @@ package org.apache.linkis.entrance.scheduler import org.apache.linkis.common.conf.{CommonVars, Configuration} import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} import org.apache.linkis.entrance.execute.EntranceJob import org.apache.linkis.governance.common.protocol.conf.{ @@ -204,7 +205,7 @@ object EntranceGroupFactory { val engineType = labels.asScala.find(_.isInstanceOf[EngineTypeLabel]) val concurrent = labels.asScala.find(_.isInstanceOf[ConcurrentEngineConnLabel]) if (userCreator.isEmpty || engineType.isEmpty) { - throw new EntranceErrorException(20001, "userCreator label or engineType label cannot null") + throw new
EntranceErrorException(LABEL_NOT_NULL.getErrorCode, LABEL_NOT_NULL.getErrorDesc) } if (concurrent.isDefined) { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala index 6c1ec4fc2e2..50318e563f3 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala @@ -19,6 +19,7 @@ package org.apache.linkis.entrance.scheduler.cache import org.apache.linkis.common.io.FsPath import org.apache.linkis.common.utils.Utils +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.CacheNotReadyException import org.apache.linkis.entrance.execute.EntranceJob import org.apache.linkis.entrance.persistence.PersistenceManager @@ -71,7 +72,10 @@ class ReadCacheConsumer( "Invalid engineType null, cannot process. jobReq : " + BDPJettyServerHelper.gson .toJson(jobRequest) ) - throw CacheNotReadyException(20052, "Invalid engineType null, cannot use cache.") + throw CacheNotReadyException( + INVALID_ENGINETYPE_NULL.getErrorCode, + INVALID_ENGINETYPE_NULL.getErrorDesc + ) } val readCacheBefore = TaskUtils .getRuntimeMap(job.getParams) @@ -95,7 +99,10 @@ class ReadCacheConsumer( ) .getSchemaPath // persistenceManager.onResultSetCreated(job, new CacheOutputExecuteResponse(alias, output)) - throw CacheNotReadyException(20053, "Invalid resultsets, cannot use cache.") + throw CacheNotReadyException( + INVALID_RESULTSETS.getErrorCode, + INVALID_RESULTSETS.getErrorDesc + ) // todo check } // persistenceManager.onResultSizeCreated(job, resultSets.size()) diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala index af8288d422b..aaaf131bd86 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala @@ -43,9 +43,9 @@ class JobTimeoutManager extends Logging { def add(jobKey: String, job: EntranceJob): Unit = { logger.info(s"Adding timeout job: ${job.getId()}") - if (!timeoutJobByName.asScala.contains(jobKey)) { + if (!timeoutJobByName.containsKey(jobKey)) { synchronized { - if (!timeoutJobByName.asScala.contains(jobKey)) { + if (!timeoutJobByName.containsKey(jobKey)) { timeoutJobByName.put(jobKey, job) } } @@ -65,7 +65,7 @@ class JobTimeoutManager extends Logging { } def jobExist(jobKey: String): Boolean = { - timeoutJobByName.asScala.contains(jobKey) + timeoutJobByName.containsKey(jobKey) } def jobCompleteDelete(jobkey: String): Unit = { @@ -154,9 +154,9 @@ object JobTimeoutManager { // If the timeout label set by the user is invalid, execution is not allowed def checkTimeoutLabel(labels: util.Map[String, Label[_]]): Unit = { val jobQueuingTimeoutLabel = - labels.asScala.getOrElse(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, null) + labels.getOrDefault(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, null) val jobRunningTimeoutLabel = - 
labels.asScala.getOrElse(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, null) + labels.getOrDefault(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, null) val posNumPattern = "^[0-9]+$" if ( (null != jobQueuingTimeoutLabel && !jobQueuingTimeoutLabel.getStringValue.matches( diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala index d912e711094..e60a5e2181e 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala @@ -20,6 +20,7 @@ package org.apache.linkis.entrance.utils import org.apache.linkis.common.exception.ErrorException import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.JobHistoryFailedException import org.apache.linkis.entrance.execute.EntranceJob import org.apache.linkis.governance.common.constant.job.JobRequestConstants @@ -136,7 +137,7 @@ object JobHistoryHelper extends Logging { val status = responsePersist.getStatus if (status != SUCCESS_FLAG) { logger.error(s"query from jobHistory status failed, status is $status") - throw JobHistoryFailedException("query from jobHistory status failed") + throw JobHistoryFailedException(QUERY_STATUS_FAILED.getErrorDesc) } else { val data = responsePersist.getData data.get(JobRequestConstants.JOB_HISTORY_LIST) match { @@ -144,19 +145,17 @@ object JobHistoryHelper extends Logging { if (tasks.size() > 0) tasks.get(0) else null case _ => - throw JobHistoryFailedException( - s"query from jobhistory not a correct List type taskId is $taskID" - ) + throw JobHistoryFailedException(CORRECT_LIST_TYPE.getErrorDesc + s"$taskID") } } case _ => logger.error("get query response incorrectly") - throw JobHistoryFailedException("get query response incorrectly") + throw JobHistoryFailedException(GET_QUERY_RESPONSE.getErrorDesc) } } { case errorException: ErrorException => throw errorException case e: Exception => - val e1 = JobHistoryFailedException(s"query taskId $taskID error") + val e1 = JobHistoryFailedException(QUERY_TASKID_ERROR.getErrorDesc + s"$taskID") e1.initCause(e) throw e } diff --git a/linkis-computation-governance/linkis-manager/distribution.xml b/linkis-computation-governance/linkis-manager/distribution.xml new file mode 100644 index 00000000000..4b8028abcc2 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/distribution.xml @@ -0,0 +1,297 @@ + + + + + linkis-publicservice + + dir + + false + linkis-manager + + + + lib + true + true + false + false + true + + antlr:antlr:jar + aopalliance:aopalliance:jar + asm:asm:jar + cglib:cglib:jar + com.amazonaws:aws-java-sdk-autoscaling:jar + com.amazonaws:aws-java-sdk-core:jar + com.amazonaws:aws-java-sdk-ec2:jar + com.amazonaws:aws-java-sdk-route53:jar + com.amazonaws:aws-java-sdk-sts:jar + com.amazonaws:jmespath-java:jar + com.fasterxml.jackson.core:jackson-annotations:jar + com.fasterxml.jackson.core:jackson-core:jar + com.fasterxml.jackson.core:jackson-databind:jar + com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:jar + com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar +
com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar + com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:jar + com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:jar + com.fasterxml.jackson.module:jackson-module-jaxb-annotations:jar + com.fasterxml.jackson.module:jackson-module-parameter-names:jar + com.fasterxml.jackson.module:jackson-module-paranamer:jar + com.fasterxml.jackson.module:jackson-module-scala_2.11:jar + com.github.andrewoma.dexx:dexx-collections:jar + com.github.vlsi.compactmap:compactmap:jar + com.google.code.findbugs:annotations:jar + com.google.code.findbugs:jsr305:jar + com.google.code.gson:gson:jar + com.google.guava:guava:jar + com.google.errorprone:error_prone_annotations:jar + com.google.guava:failureaccess:jar + com.google.guava:listenablefuture:jar:9999.0-empty-to-avoid-conflict-with-guava + com.google.inject:guice:jar + com.google.protobuf:protobuf-java:jar + com.netflix.archaius:archaius-core:jar + com.netflix.eureka:eureka-client:jar + com.netflix.eureka:eureka-core:jar + com.netflix.hystrix:hystrix-core:jar + com.netflix.netflix-commons:netflix-commons-util:jar + com.netflix.netflix-commons:netflix-eventbus:jar + com.netflix.netflix-commons:netflix-infix:jar + com.netflix.netflix-commons:netflix-statistics:jar + com.netflix.ribbon:ribbon:jar + com.netflix.ribbon:ribbon-core:jar + com.netflix.ribbon:ribbon-eureka:jar + com.netflix.ribbon:ribbon-httpclient:jar + com.netflix.ribbon:ribbon-loadbalancer:jar + com.netflix.ribbon:ribbon-transport:jar + com.netflix.servo:servo-core:jar + com.ning:async-http-client:jar + com.sun.jersey.contribs:jersey-apache-client4:jar + com.sun.jersey:jersey-client:jar + com.sun.jersey:jersey-core:jar + com.sun.jersey:jersey-json:jar + com.sun.jersey:jersey-server:jar + com.sun.jersey:jersey-servlet:jar + com.sun.xml.bind:jaxb-impl:jar + com.thoughtworks.paranamer:paranamer:jar + com.thoughtworks.xstream:xstream:jar + org.apache.linkis:linkis-common:jar + org.apache.linkis:linkis-module:jar + commons-beanutils:commons-beanutils:jar + commons-beanutils:commons-beanutils-core:jar + commons-cli:commons-cli:jar + commons-codec:commons-codec:jar + commons-collections:commons-collections:jar + commons-configuration:commons-configuration:jar + commons-daemon:commons-daemon:jar + commons-dbcp:commons-dbcp:jar + commons-digester:commons-digester:jar + commons-httpclient:commons-httpclient:jar + commons-io:commons-io:jar + commons-jxpath:commons-jxpath:jar + commons-lang:commons-lang:jar + commons-logging:commons-logging:jar + commons-net:commons-net:jar + commons-pool:commons-pool:jar + io.micrometer:micrometer-core:jar + io.netty:netty:jar + io.netty:netty-all:jar + io.netty:netty-buffer:jar + io.netty:netty-codec:jar + io.netty:netty-codec-http:jar + io.netty:netty-common:jar + io.netty:netty-handler:jar + io.netty:netty-transport:jar + io.netty:netty-transport-native-epoll:jar + io.reactivex:rxjava:jar + io.reactivex:rxnetty:jar + io.reactivex:rxnetty-contexts:jar + io.reactivex:rxnetty-servo:jar + javax.activation:activation:jar + javax.annotation:javax.annotation-api:jar + javax.inject:javax.inject:jar + javax.servlet:javax.servlet-api:jar + javax.servlet.jsp:jsp-api:jar + javax.validation:validation-api:jar + javax.websocket:javax.websocket-api:jar + javax.ws.rs:javax.ws.rs-api:jar + javax.xml.bind:jaxb-api:jar + javax.xml.stream:stax-api:jar + joda-time:joda-time:jar + log4j:log4j:jar + mysql:mysql-connector-java:jar + net.databinder.dispatch:dispatch-core_2.11:jar + net.databinder.dispatch:dispatch-json4s-jackson_2.11:jar + 
org.antlr:antlr-runtime:jar + org.antlr:stringtemplate:jar + org.apache.commons:commons-compress:jar + org.apache.commons:commons-math:jar + org.apache.commons:commons-math3:jar + org.apache.curator:curator-client:jar + org.apache.curator:curator-framework:jar + org.apache.curator:curator-recipes:jar + org.apache.directory.api:api-asn1-api:jar + org.apache.directory.api:api-util:jar + org.apache.directory.server:apacheds-i18n:jar + org.apache.directory.server:apacheds-kerberos-codec:jar + org.apache.hadoop:hadoop-annotations:jar + org.apache.hadoop:hadoop-auth:jar + org.apache.hadoop:hadoop-common:jar + org.apache.hadoop:hadoop-hdfs:jar + org.apache.htrace:htrace-core:jar + org.apache.httpcomponents:httpclient:jar + org.apache.httpcomponents:httpcore:jar + org.apache.logging.log4j:log4j-api:jar + org.apache.logging.log4j:log4j-core:jar + org.apache.logging.log4j:log4j-jul:jar + org.apache.logging.log4j:log4j-slf4j-impl:jar + org.apache.zookeeper:zookeeper:jar + org.aspectj:aspectjweaver:jar + org.bouncycastle:bcpkix-jdk15on:jar + org.bouncycastle:bcprov-jdk15on:jar + org.codehaus.jackson:jackson-jaxrs:jar + org.codehaus.jackson:jackson-xc:jar + org.codehaus.jettison:jettison:jar + org.codehaus.woodstox:stax2-api:jar + org.codehaus.woodstox:woodstox-core-asl:jar + org.eclipse.jetty:jetty-annotations:jar + org.eclipse.jetty:jetty-client:jar + org.eclipse.jetty:jetty-continuation:jar + org.eclipse.jetty:jetty-http:jar + org.eclipse.jetty:jetty-io:jar + org.eclipse.jetty:jetty-jndi:jar + org.eclipse.jetty:jetty-plus:jar + org.eclipse.jetty:jetty-security:jar + org.eclipse.jetty:jetty-server:jar + org.eclipse.jetty:jetty-servlet:jar + org.eclipse.jetty:jetty-servlets:jar + org.eclipse.jetty:jetty-util:jar + org.eclipse.jetty:jetty-webapp:jar + org.eclipse.jetty:jetty-xml:jar + org.eclipse.jetty.websocket:javax-websocket-client-impl:jar + org.eclipse.jetty.websocket:javax-websocket-server-impl:jar + org.eclipse.jetty.websocket:websocket-api:jar + org.eclipse.jetty.websocket:websocket-client:jar + org.eclipse.jetty.websocket:websocket-common:jar + org.eclipse.jetty.websocket:websocket-server:jar + org.eclipse.jetty.websocket:websocket-servlet:jar + org.fusesource.leveldbjni:leveldbjni-all:jar + org.glassfish.hk2:class-model:jar + org.glassfish.hk2:config-types:jar + org.glassfish.hk2.external:aopalliance-repackaged:jar + org.glassfish.hk2.external:asm-all-repackaged:jar + org.glassfish.hk2.external:bean-validator:jar + org.glassfish.hk2.external:javax.inject:jar + org.glassfish.hk2:hk2:jar + org.glassfish.hk2:hk2-api:jar + org.glassfish.hk2:hk2-config:jar + org.glassfish.hk2:hk2-core:jar + org.glassfish.hk2:hk2-locator:jar + org.glassfish.hk2:hk2-runlevel:jar + org.glassfish.hk2:hk2-utils:jar + org.glassfish.hk2:osgi-resource-locator:jar + org.glassfish.hk2:spring-bridge:jar + org.glassfish.jersey.bundles:jaxrs-ri:jar + org.glassfish.jersey.bundles.repackaged:jersey-guava:jar + org.glassfish.jersey.containers:jersey-container-servlet:jar + org.glassfish.jersey.containers:jersey-container-servlet-core:jar + org.glassfish.jersey.core:jersey-client:jar + org.glassfish.jersey.core:jersey-common:jar + org.glassfish.jersey.core:jersey-server:jar + org.glassfish.jersey.ext:jersey-entity-filtering:jar + org.glassfish.jersey.ext:jersey-spring3:jar + org.glassfish.jersey.media:jersey-media-jaxb:jar + org.glassfish.jersey.media:jersey-media-json-jackson:jar + org.glassfish.jersey.media:jersey-media-multipart:jar + org.hdrhistogram:HdrHistogram:jar + org.javassist:javassist:jar + 
org.json4s:json4s-ast_2.11:jar + org.json4s:json4s-core_2.11:jar + org.json4s:json4s-jackson_2.11:jar + org.jsoup:jsoup:jar + org.jvnet.mimepull:mimepull:jar + org.jvnet:tiger-types:jar + org.latencyutils:LatencyUtils:jar + org.mortbay.jasper:apache-el:jar + org.mortbay.jetty:jetty:jar + org.mortbay.jetty:jetty-util:jar + org.ow2.asm:asm-analysis:jar + org.ow2.asm:asm-commons:jar + org.ow2.asm:asm-tree:jar + org.reflections:reflections:jar + org.scala-lang.modules:scala-parser-combinators_2.11:jar + org.scala-lang.modules:scala-xml_2.11:jar + org.scala-lang:scala-compiler:jar + org.scala-lang:scala-library:jar + org.scala-lang:scala-reflect:jar + org.scala-lang:scalap:jar + org.slf4j:jul-to-slf4j:jar + org.slf4j:slf4j-api:jar + org.springframework.boot:spring-boot:jar + org.springframework.boot:spring-boot-actuator:jar + org.springframework.boot:spring-boot-actuator-autoconfigure:jar + org.springframework.boot:spring-boot-autoconfigure:jar + org.springframework.boot:spring-boot-starter:jar + org.springframework.boot:spring-boot-starter-actuator:jar + org.springframework.boot:spring-boot-starter-aop:jar + org.springframework.boot:spring-boot-starter-jetty:jar + org.springframework.boot:spring-boot-starter-json:jar + org.springframework.boot:spring-boot-starter-log4j2:jar + org.springframework.boot:spring-boot-starter-web:jar + org.springframework.cloud:spring-cloud-commons:jar + org.springframework.cloud:spring-cloud-config-client:jar + org.springframework.cloud:spring-cloud-context:jar + org.springframework.cloud:spring-cloud-netflix-archaius:jar + org.springframework.cloud:spring-cloud-netflix-core:jar + org.springframework.cloud:spring-cloud-netflix-eureka-client:jar + org.springframework.cloud:spring-cloud-netflix-ribbon:jar + org.springframework.cloud:spring-cloud-starter:jar + org.springframework.cloud:spring-cloud-starter-config:jar + org.springframework.cloud:spring-cloud-starter-eureka:jar + org.springframework.cloud:spring-cloud-starter-netflix-archaius:jar + org.springframework.cloud:spring-cloud-starter-netflix-eureka-client:jar + org.springframework.cloud:spring-cloud-starter-netflix-ribbon:jar + org.springframework.security:spring-security-crypto:jar + org.springframework.security:spring-security-rsa:jar + org.springframework:spring-aop:jar + org.springframework:spring-beans:jar + org.springframework:spring-context:jar + org.springframework:spring-core:jar + org.springframework:spring-expression:jar + org.springframework:spring-jcl:jar + org.springframework:spring-web:jar + org.springframework:spring-webmvc:jar + org.tukaani:xz:jar + org.yaml:snakeyaml:jar + software.amazon.ion:ion-java:jar + xerces:xercesImpl:jar + xmlenc:xmlenc:jar + xmlpull:xmlpull:jar + xpp3:xpp3_min:jar + com.google.j2objc:j2objc-annotations:jar + + + + + + + + + diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml b/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml index 966eff5378c..7995de49d40 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml @@ -21,7 +21,7 @@ org.apache.linkis linkis 1.3.0 - ../pom.xml + ../../../pom.xml linkis-application-manager diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/assembly/distribution.xml b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/assembly/distribution.xml index 4c84bb2c6f2..9340d7fc730 
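Note on the error-code refactor in the ReadCacheConsumer and JobHistoryHelper hunks above: hard-coded codes such as 20052/20053 and their message strings move into the shared EntranceErrorCodeSummary enum, so every throw site reuses a single definition. A minimal sketch of that enum pattern in Java (the two constants and messages below are taken from the literals this patch replaces; the real enum lives in org.apache.linkis.entrance.errorcode and defines more entries):

    public enum EntranceErrorCodeSummary {
      // Codes/messages mirrored from the literals removed by this patch.
      INVALID_ENGINETYPE_NULL(20052, "Invalid engineType null, cannot use cache."),
      INVALID_RESULTSETS(20053, "Invalid resultsets, cannot use cache.");

      private final int errorCode;
      private final String errorDesc;

      EntranceErrorCodeSummary(int errorCode, String errorDesc) {
        this.errorCode = errorCode;
        this.errorDesc = errorDesc;
      }

      public int getErrorCode() { return errorCode; }

      public String getErrorDesc() { return errorDesc; }
    }

A caller then throws CacheNotReadyException(INVALID_ENGINETYPE_NULL.getErrorCode(), INVALID_ENGINETYPE_NULL.getErrorDesc()) instead of repeating the literal at each site.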
100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/assembly/distribution.xml +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - + + linkis-application-manager dir diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/ECResourceInfoUtils.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/ECResourceInfoUtils.java index cb4e7919266..8f36f467fb0 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/ECResourceInfoUtils.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/ECResourceInfoUtils.java @@ -60,15 +60,25 @@ public static ResourceVo getStringToMap(String str, ECResourceInfoRecord info) { Map divermap = MapUtils.getMap(map, "driver"); resourceVo.setInstance(((Double) divermap.get("instance")).intValue()); resourceVo.setCores(((Double) divermap.get("cpu")).intValue()); - resourceVo.setMemory(ByteTimeUtils.byteStringAsBytes(divermap.get("memory").toString())); + String memoryStr = String.valueOf(map.get("memory")); + long memorylong = 0; + if (!getScientific(memoryStr)) { + memorylong = ByteTimeUtils.byteStringAsBytes(memoryStr); + } + resourceVo.setMemory(memorylong); return resourceVo; } else { logger.warn("Compatible with old data ,{},{}", info.getLabelValue(), info); return null; // Compatible with old data } } + String memoryStr = String.valueOf(map.get("memory")); + long memorylong = 0; + if (!getScientific(memoryStr)) { + memorylong = ByteTimeUtils.byteStringAsBytes(memoryStr); + } resourceVo.setInstance(((Double) map.get("instance")).intValue()); - resourceVo.setMemory(ByteTimeUtils.byteStringAsBytes((map.get("memory").toString()))); + resourceVo.setMemory(memorylong); Double core = null == map.get("cpu") ? 
(Double) map.get("cores") : (Double) map.get("cpu"); resourceVo.setCores(core.intValue()); } @@ -95,4 +105,10 @@ public static AMEngineNode convertECInfoTOECNode(ECResourceInfoRecord ecInfo) { engineNode.setUpdateTime(ecInfo.getReleaseTime()); return engineNode; } + + public static boolean getScientific(String input) { + String regx = "^((-?\\d+\\.?\\d*)[Ee]{1}(-?\\d+))$"; + Pattern pattern = Pattern.compile(regx); + return pattern.matcher(input).matches(); + } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineReuseService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineReuseService.scala index c0222274045..65e41ae1c2a 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineReuseService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineReuseService.scala @@ -71,33 +71,35 @@ class DefaultEngineReuseService extends AbstractEngineService with EngineReuseSe override def reuseEngine(engineReuseRequest: EngineReuseRequest, sender: Sender): EngineNode = { logger.info(s"Start to reuse Engine for request: $engineReuseRequest") val labelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory - var labelList: util.List[Label[_]] = LabelUtils.distinctLabel( - labelBuilderFactory.getLabels(engineReuseRequest.getLabels), - userLabelService.getUserLabels(engineReuseRequest.getUser) - ) + val labelList = LabelUtils + .distinctLabel( + labelBuilderFactory.getLabels(engineReuseRequest.getLabels), + userLabelService.getUserLabels(engineReuseRequest.getUser) + ) + .asScala val exclusionInstances: Array[String] = - labelList.asScala.find(_.isInstanceOf[ReuseExclusionLabel]) match { + labelList.find(_.isInstanceOf[ReuseExclusionLabel]) match { case Some(l) => l.asInstanceOf[ReuseExclusionLabel].getInstances case None => Array.empty[String] } - labelList = labelList.asScala.filter(_.isInstanceOf[EngineNodeLabel]).asJava + var filterLabelList = labelList.filter(_.isInstanceOf[EngineNodeLabel]).asJava val engineConnAliasLabel = labelBuilderFactory.createLabel(classOf[AliasServiceInstanceLabel]) engineConnAliasLabel.setAlias(GovernanceCommonConf.ENGINE_CONN_SPRING_NAME.getValue) - labelList.add(engineConnAliasLabel) + filterLabelList.add(engineConnAliasLabel) // label chooser if (null != engineReuseLabelChoosers) { engineReuseLabelChoosers.asScala.foreach { chooser => - labelList = chooser.chooseLabels(labelList) + filterLabelList = chooser.chooseLabels(filterLabelList) } } - val instances = nodeLabelService.getScoredNodeMapsByLabels(labelList) + val instances = nodeLabelService.getScoredNodeMapsByLabels(filterLabelList) if (null != instances && null != exclusionInstances && exclusionInstances.nonEmpty) { val instancesKeys = instances.asScala.keys.toArray diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala index f58e9006529..5a3e6d7151d 100644 ---
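The ECResourceInfoUtils change above guards ByteTimeUtils.byteStringAsBytes with getScientific, because older records may have serialized memory as a scientific-notation double (e.g. "2.0E9"), which a byte-string parser rejects; such values now fall back to 0. A self-contained sketch of the guard (the fallback parser below is illustrative only, not the real ByteTimeUtils; note the dot is escaped so it matches a literal dot):

    import java.util.regex.Pattern;

    public class MemoryGuardSketch {
      // Matches scientific notation such as "2.0E9" or "-1.5e-3".
      private static final Pattern SCIENTIFIC =
          Pattern.compile("^((-?\\d+\\.?\\d*)[Ee](-?\\d+))$");

      static long parseMemory(String memoryStr) {
        if (memoryStr == null || SCIENTIFIC.matcher(memoryStr).matches()) {
          return 0L; // mirrors the patch: skip values the byte-string parser cannot handle
        }
        // Stand-in for ByteTimeUtils.byteStringAsBytes(memoryStr).
        return Long.parseLong(memoryStr);
      }

      public static void main(String[] args) {
        System.out.println(parseMemory("2.0E9"));      // 0 (scientific notation, skipped)
        System.out.println(parseMemory("1073741824")); // 1073741824
      }
    }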
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala @@ -118,7 +118,7 @@ class DefaultNodeLabelService extends NodeLabelService with Logging { val willBeAdd = newKeyList.diff(oldKeyList) val willBeUpdate = oldKeyList.diff(willBeDelete) val modifiableKeyList = LabelUtils.listAllUserModifiableLabel() - if (!CollectionUtils.isEmpty(willBeDelete.asJava)) { + if (null != willBeDelete && willBeDelete.nonEmpty) { nodeLabels.asScala.foreach(nodeLabel => { if ( modifiableKeyList.contains(nodeLabel.getLabelKey) && willBeDelete @@ -135,7 +135,7 @@ class DefaultNodeLabelService extends NodeLabelService with Logging { * update step: * 1.delete relations of old labels 2.add new relation between new labels and instance */ - if (!CollectionUtils.isEmpty(willBeUpdate.asJava)) { + if (null != willBeUpdate && willBeUpdate.nonEmpty) { labels.asScala.foreach(label => { if ( modifiableKeyList.contains(label.getLabelKey) && willBeUpdate @@ -156,7 +156,7 @@ class DefaultNodeLabelService extends NodeLabelService with Logging { } }) } - if (!CollectionUtils.isEmpty(willBeAdd.asJava)) { + if (null != willBeAdd && willBeAdd.nonEmpty) { labels.asScala .filter(label => willBeAdd.contains(label.getLabelKey)) .foreach(label => { diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultUserLabelService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultUserLabelService.scala index 7190fa0c880..b0cb0524b31 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultUserLabelService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultUserLabelService.scala @@ -65,7 +65,7 @@ class DefaultUserLabelService extends UserLabelService with Logging { // 4. Find the duplicated key and delete its relation duplicatedKeyLabel.foreach(l => { labelManagerPersistence.removeLabelFromUser(user, util.Arrays.asList(l.getId)) - userRelationLabels.asScala.toList.asJava.remove(duplicatedKeyLabel.get) + userRelationLabels.remove(duplicatedKeyLabel.get) }) // 5. Insert the new relation; a duplicateKey exception must be thrown here so the transaction rolls back labelManagerPersistence.addLabelToUser(user, util.Arrays.asList(dbLabel.getId)) diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/pom.xml b/linkis-computation-governance/linkis-manager/linkis-label-common/pom.xml index bbc0970251e..3042f5068fd 100644 --- a/linkis-computation-governance/linkis-manager/linkis-label-common/pom.xml +++ b/linkis-computation-governance/linkis-manager/linkis-label-common/pom.xml @@ -21,7 +21,7 @@ org.apache.linkis linkis 1.3.0 - ../../pom.xml + ../../../pom.xml linkis-label-common diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/pom.xml b/linkis-computation-governance/linkis-manager/linkis-manager-common/pom.xml index 13bde6cf4e6..f8c3228ad8d 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/pom.xml +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/pom.xml @@ -21,7 +21,7 @@ org.apache.linkis linkis
1.3.0 - + ../../../pom.xml linkis-manager-common diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/pom.xml b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/pom.xml index ab3038de367..f4ca736fe1b 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/pom.xml +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/pom.xml @@ -21,7 +21,7 @@ org.apache.linkis linkis 1.3.0 - + ../../../pom.xml linkis-manager-persistence diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/LockManagerMapper.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/LockManagerMapper.java index 7bad8b4ef42..023822f4e58 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/LockManagerMapper.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/LockManagerMapper.java @@ -25,21 +25,12 @@ @Mapper public interface LockManagerMapper { - @Insert( - "insert into linkis_cg_manager_lock (lock_object, time_out, update_time, create_time)" - + "values(#{jsonObject}, #{timeOut}, now(), now())") - void lock(@Param("jsonObject") String jsonObject, @Param("timeOut") Long timeOut); - - @Delete("delete from linkis_cg_manager_lock where id = #{id}") - void unlock(Integer id); - - @Select("select * from linkis_cg_manager_lock where lock_object = #{jsonObject}") - List getLockersByLockObject(String jsonObject); - - @Select("select * from linkis_cg_manager_lock") - @Results({ - @Result(property = "updateTime", column = "update_time"), - @Result(property = "createTime", column = "create_time") - }) + + void lock(@Param("lockObject") String lockObject, @Param("timeOut") Long timeOut); + + void unlock(@Param("id") Integer id); + + List getLockersByLockObject(@Param("lockObject") String lockObject); + List getAll(); } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeManagerMapper.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeManagerMapper.java index e8d505ef33e..f60cee548db 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeManagerMapper.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeManagerMapper.java @@ -62,15 +62,15 @@ void updateNodeInstance( @Update({ "update linkis_cg_manager_service_instance set owner = #{persistenceNode.owner},mark = #{persistenceNode.mark},name = #{persistenceNode.name}," - + "update_time = #{persistenceNode.updateTime},create_time = #{persistenceNode.createTime},updator = #{persistenceNode.updator},creator = #{persistenceNode.creator} where instance = #{instance}" + + "update_time = #{persistenceNode.updateTime},create_time = #{persistenceNode.createTime},updator = #{persistenceNode.updator},creator = #{persistenceNode.creator} where instance = #{persistenceNode.instance}" }) - void updateNodeInstanceOverload(PersistenceNode persistenceNode); + void updateNodeInstanceOverload(@Param("persistenceNode") PersistenceNode persistenceNode); @Select("select id from linkis_cg_manager_service_instance 
where instance = #{instance}") - int getNodeInstanceId(@Param("instance") String instance); + Integer getNodeInstanceId(@Param("instance") String instance); @Select("select id from linkis_cg_manager_service_instance where instance = #{instance}") - int getIdByInstance(@Param("instance") String instance); + Integer getIdByInstance(@Param("instance") String instance); @Select( "") - List getNodeInstanceIds(@Param("serviceInstances") List instances); + List getNodeInstanceIds(@Param("instances") List instances); @Select("select * from linkis_cg_manager_service_instance where instance = #{instance}") @Results({ @@ -122,7 +122,7 @@ void updateNodeInstance( @Result(property = "updateTime", column = "update_time"), @Result(property = "createTime", column = "create_time") }) - List getNodesByInstances(@Param("engineNodeIds") List instances); + List getNodesByInstances(@Param("instances") List instances); @Insert( "insert into linkis_cg_manager_engine_em (engine_instance, em_instance, update_time, create_time)" diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeMetricManagerMapper.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeMetricManagerMapper.java index 2a8aeb6febd..8d9397ff962 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeMetricManagerMapper.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeMetricManagerMapper.java @@ -26,76 +26,21 @@ public interface NodeMetricManagerMapper { - @Insert( - "insert into linkis_cg_manager_service_instance_metrics (instance, instance_status, overload, heartbeat_msg,healthy_status,update_time,create_time)" - + "values(#{nodeMetrics.instance},#{nodeMetrics.status},#{nodeMetrics.overLoad},#{nodeMetrics.heartBeatMsg},#{nodeMetrics.healthy},now(),now())") void addNodeMetrics(@Param("nodeMetrics") PersistenceNodeMetrics nodeMetrics); - @Select( - "select count(id) from linkis_cg_manager_service_instance_metrics met inner join linkis_cg_manager_service_instance ins" - + " on met.instance = #{instance} and ins.instance = #{instance} and met.instance = ins.instance") - int checkInstanceExist(@Param("instance") String instance); + Integer checkInstanceExist(@Param("instance") String instance); - @Select( - "") - @Results({ - @Result(property = "updateTime", column = "update_time"), - @Result(property = "createTime", column = "create_time"), - @Result(property = "instance", column = "instance"), - @Result(property = "heartBeatMsg", column = "heartbeat_msg"), - @Result(property = "status", column = "instance_status"), - @Result(property = "healthy", column = "healthy_status") - }) List getNodeMetricsByInstances( @Param("instances") List instances); - @Select("select * from linkis_cg_manager_service_instance_metrics where instance = #{instance}") - @Results({ - @Result(property = "updateTime", column = "update_time"), - @Result(property = "createTime", column = "create_time"), - @Result(property = "instance", column = "instance"), - @Result(property = "heartBeatMsg", column = "heartbeat_msg"), - @Result(property = "status", column = "instance_status"), - @Result(property = "healthy", column = "healthy_status") - }) PersistenceNodeMetrics getNodeMetricsByInstance(@Param("instance") String instance); - @Update({ - "" - }) void updateNodeMetrics( 
@Param("nodeMetrics") PersistenceNodeMetrics nodeMetrics, @Param("instance") String instance); - @Delete( - "delete from linkis_cg_manager_service_instance_metrics where instance in (select instance from linkis_cg_manager_service_instance where instance=#{instance})") void deleteNodeMetrics(@Param("instance") String instance); - @Delete("delete from linkis_cg_manager_service_instance_metrics where instance = #{instance}") void deleteNodeMetricsByInstance(@Param("instance") String instance); - @Select( - "select A.name,B.* from linkis_cg_manager_service_instance A join linkis_cg_manager_service_instance_metrics B where A.instance = B.instance") - @Results({ - @Result(property = "instance", column = "instance"), - @Result(property = "heartBeatMsg", column = "heartbeat_msg"), - @Result(property = "status", column = "instance_status"), - @Result(property = "healthy", column = "healthy_status"), - @Result(property = "updateTime", column = "update_time"), - @Result(property = "createTime", column = "create_time") - }) - @ResultType(PersistenceNodeMetricsEntity.class) List getAllNodeMetrics(); } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/ResourceManagerMapper.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/ResourceManagerMapper.java index 8e2754c60fc..ea52a66d2ca 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/ResourceManagerMapper.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/ResourceManagerMapper.java @@ -25,75 +25,35 @@ import java.util.List; public interface ResourceManagerMapper { - @Options(useGeneratedKeys = true, keyProperty = "id", keyColumn = "id") - // @SelectKey(statement = "select last_insert_id() AS id", keyProperty = "id", before = false, - // resultType = int.class) - @Insert( - "INSERT INTO linkis_cg_manager_linkis_resources VALUES" - + "(#{id},#{maxResource},#{minResource},#{usedResource},#{leftResource},#{expectedResource},#{lockedResource},#{resourceType},#{ticketId},now(),now(),#{updator},#{creator})") + void registerResource(PersistenceResource persistenceResource); - @Update({ - "update linkis_cg_manager_linkis_resources set max_resource = #{persistenceResource.maxResource},min_resource = #{persistenceResource.minResource}, " - + "used_resource=#{persistenceResource.usedResource},left_resource=#{persistenceResource.leftResource},expected_resource=#{persistenceResource.expectedResource}," - + "locked_resource=#{persistenceResource.lockedResource}," - + "update_time=#{persistenceResource.updateTime} where ticketId = #{ticketId}" - }) void nodeResourceUpdate( @Param("ticketId") String ticketId, @Param("persistenceResource") PersistenceResource persistenceResource); - @Update({ - "update linkis_cg_manager_linkis_resources set max_resource = #{persistenceResource.maxResource},min_resource = #{persistenceResource.minResource}," - + "used_resource=#{persistenceResource.usedResource},left_resource=#{persistenceResource.leftResource},expected_resource=#{persistenceResource.expectedResource}," - + "locked_resource=#{persistenceResource.lockedResource}," - + "update_time=#{persistenceResource.updateTime} where id = #{resourceId}" - }) void nodeResourceUpdateByResourceId( @Param("resourceId") int resourceId, @Param("persistenceResource") PersistenceResource 
persistenceResource); - @Select( - "select id from linkis_cg_manager_linkis_resources where ticketId is null and id in ( select resource_id from linkis_cg_manager_label_resource A join linkis_cg_manager_label_service_instance B on A.label_id=B.label_id and B.service_instance=#{instance})") - int getNodeResourceUpdateResourceId(@Param("instance") String instance); + Integer getNodeResourceUpdateResourceId(@Param("instance") String instance); - @Delete( - "delete from linkis_cg_manager_label_resource where label_id in (select label_id from linkis_cg_manager_label_service_instance where service_instance=#{instance})") void deleteResourceAndLabelId(@Param("instance") String instance); - @Delete( - "delete from linkis_cg_manager_linkis_resources where id in " - + "(select resource_id from linkis_cg_manager_label_resource A join linkis_cg_manager_label_service_instance B on A.label_id=B.label_id and B.service_instance = #{instance} )") void deleteResourceByInstance(@Param("instance") String instance); @Delete("delete from linkis_cg_manager_linkis_resources where ticketId = #{ticketId}") void deleteResourceByTicketId(@Param("ticketId") String ticketId); - // @Select("select * from linkis_cg_manager_linkis_resources where id = #{id} adn - // resourceType = #{resourceType}") - // PersistenceResource getResourceByIdAndType(@Param("id") int id,@Param("resourceType") - // String resourceType); - - @Select( - "select * from linkis_cg_manager_linkis_resources where resourceType = #{resourceType} and" - + " id in (select resource_id from linkis_cg_manager_label_resource A join linkis_cg_manager_label_service_instance B on A.label_id = B.label_id and B.service_instance=#{instance})") List getResourceByInstanceAndResourceType( @Param("instance") String instance, @Param("resourceType") String resourceType); - @Select( - "select * from linkis_cg_manager_linkis_resources where id in " - + "(select resource_id from linkis_cg_manager_label_resource A join linkis_cg_manager_label_service_instance B on A.label_id = B.label_id and B.service_instance= #{instance})") List getResourceByServiceInstance(@Param("instance") String instance); - @Select("select * from linkis_cg_manager_linkis_resources where ticketId = #{ticketId}") PersistenceResource getNodeResourceByTicketId(@Param("ticketId") String ticketId); - @Select( - "select * from linkis_cg_manager_linkis_resources where id in (select resource_id from linkis_cg_manager_label_resource A join linkis_manager_lable_user B on A.label_id=B.label_id AND B.user_name=#{userName})") List getResourceByUserName(@Param("userName") String userName); - @Select( - "select * from linkis_cg_manager_label where id in (select label_id from linkis_cg_manager_label_resource A join linkis_cg_manager_linkis_resources B on A.resource_id=B.id and B.ticketId=#{ticketId})") List getLabelsByTicketId(@Param("ticketId") String ticketId); void deleteResourceById(@Param("ids") List ids); diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/impl/LockManagerMapper.xml b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/impl/LockManagerMapper.xml index bdcc15c4e31..7814ee77ad5 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/impl/LockManagerMapper.xml +++ 
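The mapper changes above (LockManagerMapper, NodeManagerMapper, NodeMetricManagerMapper, ResourceManagerMapper) strip the inline @Insert/@Select/@Update/@Delete annotations and @Results mappings; the SQL moves into the companion XML files shown next, where MyBatis resolves each statement by method name against an XML namespace equal to the mapper's fully qualified name. A condensed sketch of the resulting pairing (the interface below is a simplified stand-in, not the full mapper):

    import java.util.List;
    import org.apache.ibatis.annotations.Mapper;
    import org.apache.ibatis.annotations.Param;
    import org.apache.linkis.manager.common.entity.persistence.PersistenceLock;

    @Mapper
    public interface LockManagerMapperSketch {
      // Resolved against LockManagerMapper.xml, e.g.
      //   <insert id="lock">
      //     insert into linkis_cg_manager_lock (lock_object, time_out, update_time, create_time)
      //     values(#{lockObject}, #{timeOut}, now(), now())
      //   </insert>
      void lock(@Param("lockObject") String lockObject, @Param("timeOut") Long timeOut);

      //   <delete id="unlock">delete from linkis_cg_manager_lock where id = #{id}</delete>
      void unlock(@Param("id") Integer id);

      //   <select id="getLockersByLockObject" ...> keyed by the same method name.
      List<PersistenceLock> getLockersByLockObject(@Param("lockObject") String lockObject);
    }

Keeping the SQL in XML also lets result maps (update_time, create_time, heartbeat_msg and similar snake_case columns) be declared once per file instead of repeating @Results on every method.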
b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/impl/LockManagerMapper.xml @@ -69,7 +69,21 @@ + + insert into linkis_cg_manager_lock (lock_object, time_out, update_time, create_time) + values(#{lockObject}, #{timeOut}, now(), now()) + + + delete from linkis_cg_manager_lock where id = #{id} + + + + \ No newline at end of file diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/impl/NodeMetricManagerMapper.xml b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/impl/NodeMetricManagerMapper.xml index 0797735aae8..569bb838a88 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/impl/NodeMetricManagerMapper.xml +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/impl/NodeMetricManagerMapper.xml @@ -20,57 +20,63 @@ - - - - - - - - - + + + + + + + + + + - - - - + + insert into linkis_cg_manager_service_instance_metrics (instance, instance_status, overload, heartbeat_msg,healthy_status,update_time,create_time) + values(#{nodeMetrics.instance},#{nodeMetrics.status},#{nodeMetrics.overLoad},#{nodeMetrics.heartBeatMsg},#{nodeMetrics.healthy},now(),now()) + - - - - - - - - - - - + + + - + - - - + + update linkis_cg_manager_service_instance_metrics + + instance_status = #{nodeMetrics.status}, + overload = #{nodeMetrics.overLoad}, + heartbeat_msg = #{nodeMetrics.heartBeatMsg}, + healthy_status=#{nodeMetrics.healthy}, + update_time=#{nodeMetrics.updateTime}, + where instance = #{instance} + + + delete from linkis_cg_manager_service_instance_metrics where instance in + (select instance from linkis_cg_manager_service_instance where instance=#{instance}) + - - - - - - - - - - - - - - - - + + delete from linkis_cg_manager_service_instance_metrics where instance = #{instance} + + \ No newline at end of file diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/impl/ResourceManagerMapper.xml b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/impl/ResourceManagerMapper.xml index 09e40887bd7..c5b3a6d9e3f 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/impl/ResourceManagerMapper.xml +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/impl/ResourceManagerMapper.xml @@ -38,5 +38,64 @@ SELECT * FROM linkis_cg_manager_linkis_resources WHERE id = #{id} + + INSERT INTO linkis_cg_manager_linkis_resources VALUES + (#{id},#{maxResource},#{minResource},#{usedResource},#{leftResource},#{expectedResource},#{lockedResource},#{resourceType},#{ticketId},now(),now(),#{updator},#{creator}) + + + update linkis_cg_manager_linkis_resources set max_resource = #{persistenceResource.maxResource},min_resource = #{persistenceResource.minResource}, + used_resource=#{persistenceResource.usedResource},left_resource=#{persistenceResource.leftResource},expected_resource=#{persistenceResource.expectedResource}, + locked_resource=#{persistenceResource.lockedResource}, + update_time=#{persistenceResource.updateTime} where ticketId = #{ticketId} + + + + update linkis_cg_manager_linkis_resources set 
max_resource = #{persistenceResource.maxResource},min_resource = #{persistenceResource.minResource}, + used_resource=#{persistenceResource.usedResource},left_resource=#{persistenceResource.leftResource},expected_resource=#{persistenceResource.expectedResource}, + locked_resource=#{persistenceResource.lockedResource}, + update_time=#{persistenceResource.updateTime} where id = #{resourceId} + + + + + + delete from linkis_cg_manager_label_resource where label_id in + (select label_id from linkis_cg_manager_label_service_instance where service_instance=#{instance}) + + + + delete from linkis_cg_manager_linkis_resources where id in + (select resource_id from linkis_cg_manager_label_resource A join linkis_cg_manager_label_service_instance B on A.label_id=B.label_id and B.service_instance = #{instance} ) + + + select * from linkis_cg_manager_linkis_resources where resourceType = #{resourceType} and + id in (select resource_id from linkis_cg_manager_label_resource A join linkis_cg_manager_label_service_instance B on A.label_id = B.label_id and B.service_instance=#{instance}) + + + + + + + + + + \ No newline at end of file diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeMetricManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeMetricManagerPersistence.java index 15a578c7ff9..293c213f437 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeMetricManagerPersistence.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeMetricManagerPersistence.java @@ -77,7 +77,11 @@ public void addNodeMetrics(NodeMetrics nodeMetrics) throws PersistenceErrorExcep @Override public void addOrupdateNodeMetrics(NodeMetrics nodeMetrics) throws PersistenceErrorException { - PersistenceNodeMetrics persistenceNodeMetrics = new PersistenceNodeMetrics(); + if (null == nodeMetrics.getServiceInstance()) { + logger.warn( + "The request of update node metrics was ignored, because the node metrics service instance is null"); + return; + } String instance = nodeMetrics.getServiceInstance().getInstance(); // todo handle exception messages in a unified way later PersistenceNode node = nodeManagerMapper.getNodeInstance(instance); @@ -90,6 +94,7 @@ public void addOrupdateNodeMetrics(NodeMetrics nodeMetrics) throws PersistenceEr } int isInstanceIdExist = nodeMetricManagerMapper.checkInstanceExist(instance); // whether the instance already exists + PersistenceNodeMetrics persistenceNodeMetrics = new PersistenceNodeMetrics(); if (isInstanceIdExist == 0) { persistenceNodeMetrics.setInstance(nodeMetrics.getServiceInstance().getInstance()); persistenceNodeMetrics.setHealthy(nodeMetrics.getHealthy()); diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/Scan.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/Scan.java index 177a3be4969..4f931b7056d 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/Scan.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/Scan.java @@ -17,27 +17,11 @@ package org.apache.linkis.manager;
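The DefaultNodeMetricManagerPersistence hunk above adds a guard clause: metrics that carry no ServiceInstance are logged and dropped instead of triggering a NullPointerException on getInstance(), and the PersistenceNodeMetrics row is now constructed only after the existence check decides between the insert and update paths. A stripped-down, runnable sketch of that flow (types are stubbed here; a map stands in for the metrics table):

    import java.util.HashMap;
    import java.util.Map;

    public class MetricsUpsertSketch {
      static class ServiceInstance { String instance; }
      static class NodeMetrics { ServiceInstance serviceInstance; String heartBeatMsg; }

      // Stand-in for linkis_cg_manager_service_instance_metrics.
      private final Map<String, String> table = new HashMap<>();

      void addOrUpdateNodeMetrics(NodeMetrics m) {
        if (m.serviceInstance == null) {
          System.err.println("update node metrics ignored: service instance is null");
          return; // the guard this patch introduces
        }
        String instance = m.serviceInstance.instance;
        if (table.containsKey(instance)) {
          // real code: nodeMetricManagerMapper.updateNodeMetrics(row, instance)
          table.put(instance, m.heartBeatMsg);
        } else {
          // real code: nodeMetricManagerMapper.addNodeMetrics(row)
          table.put(instance, m.heartBeatMsg);
        }
      }
    }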
-import org.apache.linkis.DataWorkCloudApplication; - -import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.context.annotation.*; -import org.springframework.jdbc.core.JdbcTemplate; - -import javax.sql.DataSource; -@ComponentScan( - value = "org.apache.linkis", - excludeFilters = - @ComponentScan.Filter( - type = FilterType.ASSIGNABLE_TYPE, - value = DataWorkCloudApplication.class)) -@Configuration -@EnableAspectJAutoProxy -public class Scan { - @Autowired private DataSource dataSource; +import org.mybatis.spring.annotation.MapperScan; - @Bean - public JdbcTemplate getJdbcTemplate() { - return new JdbcTemplate(dataSource); - } -} +@EnableAutoConfiguration +@MapperScan("org.apache.linkis.manager") +public class Scan {} diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/WebApplicationServer.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/WebApplicationServer.java new file mode 100644 index 00000000000..85b9eb53adf --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/WebApplicationServer.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager; + +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.boot.web.servlet.ServletComponentScan; +import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; +import org.springframework.context.annotation.ComponentScan; + +@EnableAutoConfiguration +@ServletComponentScan +@ComponentScan +public class WebApplicationServer extends SpringBootServletInitializer { + + public static void main(String[] args) { + new SpringApplicationBuilder(WebApplicationServer.class).run(args); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/BaseDaoTest.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/BaseDaoTest.java new file mode 100644 index 00000000000..2e768ed3655 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/BaseDaoTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.dao; + +import org.apache.linkis.manager.Scan; + +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; +import org.springframework.transaction.annotation.EnableTransactionManagement; +import org.springframework.transaction.annotation.Transactional; + +@SpringBootTest(classes = Scan.class) +@Transactional +@Rollback(true) +@EnableTransactionManagement +public class BaseDaoTest {} diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/LockManagerMapperTest.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/LockManagerMapperTest.java new file mode 100644 index 00000000000..df7f8591ba6 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/LockManagerMapperTest.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.dao; + +import org.apache.linkis.manager.common.entity.persistence.PersistenceLock; + +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.List; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; + +class LockManagerMapperTest extends BaseDaoTest { + + @Autowired LockManagerMapper lockManagerMapper; + + @Test + void lock() { + lockManagerMapper.lock("testjson", 1L); + List list = lockManagerMapper.getLockersByLockObject("testjson"); + assertTrue(list.size() >= 1); + } + + @Test + void unlock() { + lockManagerMapper.unlock(1); + } + + @Test + void getLockersByLockObject() { + lock(); + List list = lockManagerMapper.getLockersByLockObject("testjson"); + assertTrue(list.size() >= 1); + } + + @Test + void getAll() { + lock(); + List list = lockManagerMapper.getAll(); + assertTrue(list.size() >= 1); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeManagerMapperTest.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeManagerMapperTest.java new file mode 100644 index 00000000000..2b3e59929e8 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeManagerMapperTest.java @@ -0,0 +1,181 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.dao; + +import org.apache.linkis.manager.common.entity.persistence.PersistenceNode; + +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; + +class NodeManagerMapperTest extends BaseDaoTest { + + @Autowired NodeManagerMapper nodeManagerMapper; + + @Test + void addNodeInstance() { + PersistenceNode persistenceNode = new PersistenceNode(); + persistenceNode.setInstance("instance2"); + persistenceNode.setName("testname2"); + persistenceNode.setOwner("testowner2"); + persistenceNode.setMark("testmark2"); + persistenceNode.setUpdator("testupdator2"); + persistenceNode.setCreator("testcreator2"); + nodeManagerMapper.addNodeInstance(persistenceNode); + PersistenceNode persistenceNodes = nodeManagerMapper.getNodeInstance("instance2"); + assertTrue(persistenceNodes != null); + } + + @Test + void updateNodeInstance() { + addNodeInstance(); + PersistenceNode persistenceNode = new PersistenceNode(); + persistenceNode.setInstance("instance3"); + persistenceNode.setName("testname3"); + persistenceNode.setOwner("testowner3"); + persistenceNode.setMark("testmark3"); + persistenceNode.setUpdator("testupdator3"); + persistenceNode.setCreator("testcreator3"); + nodeManagerMapper.updateNodeInstance("instance2", persistenceNode); + PersistenceNode persistenceNodes = nodeManagerMapper.getNodeInstance("instance3"); + assertTrue(persistenceNodes != null); + } + + @Test + void removeNodeInstance() { + addNodeInstance(); + nodeManagerMapper.removeNodeInstance("instance2"); + PersistenceNode persistenceNodes = nodeManagerMapper.getNodeInstance("instance2"); + assertTrue(persistenceNodes == null); + } + + @Test + void getNodeInstancesByOwner() { + addNodeInstance(); + List list = nodeManagerMapper.getNodeInstancesByOwner("testowner2"); + assertTrue(list.size() >= 1); + } + + @Test + void getAllNodes() { + addNodeInstance(); + List list = nodeManagerMapper.getAllNodes(); + assertTrue(list.size() >= 1); + } + + @Test + void updateNodeInstanceOverload() { + addNodeInstance(); + PersistenceNode persistenceNode = new PersistenceNode(); + persistenceNode.setInstance("instance2"); + persistenceNode.setName("testname3"); + persistenceNode.setOwner("testowner3"); + persistenceNode.setMark("testmark3"); + persistenceNode.setUpdator("testupdator3"); + persistenceNode.setCreator("testcreator3"); + nodeManagerMapper.updateNodeInstanceOverload(persistenceNode); + PersistenceNode persistenceNodes = nodeManagerMapper.getNodeInstance("instance2"); + assertTrue(persistenceNode.getName().equals(persistenceNodes.getName())); + } + + @Test + void getNodeInstanceId() { + addNodeInstance(); + int i = nodeManagerMapper.getNodeInstanceId("instance2"); + assertTrue(i >= 1); + } + + @Test + void getIdByInstance() {} + + @Test + void getNodeInstanceIds() { + addNodeInstance(); + List stringList = new ArrayList<>(); + stringList.add("instance1"); + stringList.add("instance2"); + List list = nodeManagerMapper.getNodeInstanceIds(stringList); + assertTrue(list.size() >= 1); + } + + @Test + void getNodeInstance() { + PersistenceNode persistenceNode = nodeManagerMapper.getNodeInstance("instance1"); + assertTrue(persistenceNode != null); + } + + @Test + void getNodeInstanceById() { + PersistenceNode persistenceNode = nodeManagerMapper.getNodeInstanceById(1); + assertTrue(persistenceNode != null); + } + + @Test + void getEMNodeInstanceByEngineNode() { + 
PersistenceNode persistenceNode = nodeManagerMapper.getEMNodeInstanceByEngineNode("instance1"); + assertTrue(persistenceNode != null); + } + + @Test + void getNodeInstances() { + List list = nodeManagerMapper.getNodeInstances("instance1"); + assertTrue(list.size() >= 1); + } + + @Test + void getNodesByInstances() { + addNodeInstance(); + List stringList = new ArrayList<>(); + stringList.add("instance1"); + stringList.add("instance2"); + List list = nodeManagerMapper.getNodesByInstances(stringList); + assertTrue(list.size() >= 1); + } + + @Test + void addEngineNode() { + nodeManagerMapper.addEngineNode("instance1", "instance1"); + } + + @Test + void deleteEngineNode() { + nodeManagerMapper.deleteEngineNode("instance1", "instance1"); + } + + @Test + void getNodeInstanceIdsByOwner() { + addNodeInstance(); + List list = nodeManagerMapper.getNodeInstanceIdsByOwner("testowner2"); + assertTrue(list.size() >= 1); + } + + @Test + void updateNodeRelation() { + nodeManagerMapper.updateNodeRelation("instance1", "instance2"); + } + + @Test + void updateNodeLabelRelation() { + nodeManagerMapper.updateNodeLabelRelation("instance1", "instance2"); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeMetricManagerMapperTest.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeMetricManagerMapperTest.java new file mode 100644 index 00000000000..29d08b0e12d --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeMetricManagerMapperTest.java @@ -0,0 +1,113 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.dao; + +import org.apache.linkis.manager.common.entity.persistence.PersistenceNodeMetrics; +import org.apache.linkis.manager.common.entity.persistence.PersistenceNodeMetricsEntity; + +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; + +class NodeMetricManagerMapperTest extends BaseDaoTest { + + @Autowired NodeMetricManagerMapper nodeMetricManagerMapper; + + @Test + void addNodeMetrics() { + PersistenceNodeMetrics nodeMetrics = new PersistenceNodeMetrics(); + nodeMetrics.setInstance("instance1"); + nodeMetrics.setStatus(1); + nodeMetrics.setOverLoad("testoverload"); + nodeMetrics.setHeartBeatMsg("testheartbeat_msg"); + nodeMetrics.setHealthy("2"); + nodeMetricManagerMapper.addNodeMetrics(nodeMetrics); + PersistenceNodeMetrics persistenceNodeMetrics = + nodeMetricManagerMapper.getNodeMetricsByInstance("instance1"); + assertTrue(persistenceNodeMetrics != null); + } + + @Test + void checkInstanceExist() { + addNodeMetrics(); + int i = nodeMetricManagerMapper.checkInstanceExist("instance1"); + assertTrue(i >= 1); + } + + @Test + void getNodeMetricsByInstances() { + addNodeMetrics(); + List list = new ArrayList<>(); + list.add("instance1"); + List persistenceNodeMetrics = + nodeMetricManagerMapper.getNodeMetricsByInstances(list); + assertTrue(persistenceNodeMetrics.size() >= 1); + } + + @Test + void getNodeMetricsByInstance() { + addNodeMetrics(); + PersistenceNodeMetrics persistenceNodeMetrics = + nodeMetricManagerMapper.getNodeMetricsByInstance("instance1"); + assertTrue(persistenceNodeMetrics != null); + } + + @Test + void updateNodeMetrics() { + addNodeMetrics(); + PersistenceNodeMetrics nodeMetrics = new PersistenceNodeMetrics(); + nodeMetrics.setStatus(2); + nodeMetrics.setOverLoad("testoverloads"); + nodeMetrics.setHeartBeatMsg("testheartbeat_msgs"); + nodeMetrics.setHealthy("2s"); + nodeMetricManagerMapper.updateNodeMetrics(nodeMetrics, "instance1"); + PersistenceNodeMetrics persistenceNodeMetrics = + nodeMetricManagerMapper.getNodeMetricsByInstance("instance1"); + assertTrue(persistenceNodeMetrics.getOverLoad().equals("testoverloads")); + } + + @Test + void deleteNodeMetrics() { + addNodeMetrics(); + nodeMetricManagerMapper.deleteNodeMetrics("instance1"); + PersistenceNodeMetrics persistenceNodeMetrics = + nodeMetricManagerMapper.getNodeMetricsByInstance("instance1"); + assertTrue(persistenceNodeMetrics == null); + } + + @Test + void deleteNodeMetricsByInstance() { + addNodeMetrics(); + nodeMetricManagerMapper.deleteNodeMetricsByInstance("instance1"); + PersistenceNodeMetrics persistenceNodeMetrics = + nodeMetricManagerMapper.getNodeMetricsByInstance("instance1"); + assertTrue(persistenceNodeMetrics == null); + } + + @Test + void getAllNodeMetrics() { + addNodeMetrics(); + List list = nodeMetricManagerMapper.getAllNodeMetrics(); + assertTrue(list.size() >= 1); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/ResourceManagerMapperTest.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/ResourceManagerMapperTest.java new file mode 100644 index 00000000000..be8b1715136 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/ResourceManagerMapperTest.java @@ 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.manager.dao;
+
+import org.apache.linkis.manager.common.entity.persistence.PersistenceLabel;
+import org.apache.linkis.manager.common.entity.persistence.PersistenceResource;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+import org.junit.jupiter.api.Test;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+class ResourceManagerMapperTest extends BaseDaoTest {
+
+  @Autowired ResourceManagerMapper resourceManagerMapper;
+
+  @Test
+  void registerResource() {
+    PersistenceResource persistenceResource = new PersistenceResource();
+    persistenceResource.setId(1);
+    persistenceResource.setMaxResource("testmax");
+    persistenceResource.setMinResource("mintest");
+    persistenceResource.setLeftResource("left");
+    persistenceResource.setUsedResource("user");
+    persistenceResource.setTicketId("1");
+    persistenceResource.setResourceType("testtype");
+    resourceManagerMapper.registerResource(persistenceResource);
+    PersistenceResource persistenceResources = resourceManagerMapper.getResourceById(1);
+    assertThat(persistenceResources.getId())
+        .usingRecursiveComparison()
+        .isEqualTo(persistenceResource.getId());
+  }
+
+  @Test
+  void nodeResourceUpdate() {
+    registerResource();
+    PersistenceResource persistenceResource = new PersistenceResource();
+    persistenceResource.setId(1);
+    persistenceResource.setMaxResource("testmaxss");
+    persistenceResource.setMinResource("mintestss");
+    persistenceResource.setLeftResource("left");
+    persistenceResource.setUsedResource("user");
+    persistenceResource.setResourceType("testtype");
+    persistenceResource.setUpdateTime(new Date());
+    resourceManagerMapper.nodeResourceUpdate("1", persistenceResource);
+    PersistenceResource persistenceResources = resourceManagerMapper.getResourceById(1);
+    assertEquals(persistenceResource.getMaxResource(), persistenceResources.getMaxResource());
+  }
+
+  @Test
+  void nodeResourceUpdateByResourceId() {
+    registerResource();
+    PersistenceResource persistenceResource = new PersistenceResource();
+    persistenceResource.setId(1);
+    persistenceResource.setMaxResource("testmaxss");
+    persistenceResource.setMinResource("mintestss");
+    persistenceResource.setLeftResource("left");
+    persistenceResource.setUsedResource("user");
+    resourceManagerMapper.nodeResourceUpdateByResourceId(1, persistenceResource);
+    PersistenceResource persistenceResources = resourceManagerMapper.getResourceById(1);
+    assertEquals(persistenceResource.getMaxResource(), persistenceResources.getMaxResource());
+  }
+
+  @Test
+  void getNodeResourceUpdateResourceId() {
+    PersistenceResource persistenceResource = new PersistenceResource();
+    persistenceResource.setId(1);
+    persistenceResource.setMaxResource("testmax");
+    persistenceResource.setMinResource("mintest");
+    persistenceResource.setLeftResource("left");
+    persistenceResource.setUsedResource("user");
+    persistenceResource.setResourceType("testtype");
+    resourceManagerMapper.registerResource(persistenceResource);
+    Integer i = resourceManagerMapper.getNodeResourceUpdateResourceId("instance1");
+    assertTrue(i >= 1);
+  }
+
+  @Test
+  void deleteResourceAndLabelId() {
+    resourceManagerMapper.deleteResourceAndLabelId("instance1");
+  }
+
+  @Test
+  void deleteResourceByInstance() {
+    registerResource();
+    resourceManagerMapper.deleteResourceByInstance("instance1");
+    List<PersistenceResource> list =
+        resourceManagerMapper.getResourceByServiceInstance("instance1");
+    assertEquals(0, list.size());
+  }
+
+  @Test
+  void deleteResourceByTicketId() {
+    registerResource();
+    resourceManagerMapper.deleteResourceByTicketId("1");
+    PersistenceResource persistenceResource = resourceManagerMapper.getNodeResourceByTicketId("1");
+    assertNull(persistenceResource);
+  }
+
+  @Test
+  void getResourceByInstanceAndResourceType() {
+    registerResource();
+    List<PersistenceResource> list =
+        resourceManagerMapper.getResourceByInstanceAndResourceType("instance1", "testtype");
+    assertEquals(1, list.size());
+  }
+
+  @Test
+  void getResourceByServiceInstance() {
+    registerResource();
+    List<PersistenceResource> list =
+        resourceManagerMapper.getResourceByServiceInstance("instance1");
+    assertEquals(1, list.size());
+  }
+
+  @Test
+  void getNodeResourceByTicketId() {
+    registerResource();
+    PersistenceResource persistenceResource = resourceManagerMapper.getNodeResourceByTicketId("1");
+    assertNotNull(persistenceResource);
+  }
+
+  @Test
+  void getResourceByUserName() {
+    registerResource();
+    List<PersistenceResource> list = resourceManagerMapper.getResourceByUserName("testname");
+    assertEquals(1, list.size());
+  }
+
+  @Test
+  void getLabelsByTicketId() {
+    registerResource();
+    List<PersistenceLabel> list = resourceManagerMapper.getLabelsByTicketId("1");
+    assertEquals(1, list.size());
+  }
+
+  @Test
+  void deleteResourceById() {
+    registerResource();
+    List<Integer> list = new ArrayList<>();
+    list.add(1);
+    resourceManagerMapper.deleteResourceById(list);
+    PersistenceResource persistenceResource = resourceManagerMapper.getResourceById(1);
+    assertNull(persistenceResource);
+  }
+
+  @Test
+  void deleteResourceRelByResourceId() {
+    List<Integer> list = new ArrayList<>();
+    list.add(1);
+    resourceManagerMapper.deleteResourceRelByResourceId(list);
+  }
+
+  @Test
+  void getResourceById() {
+    registerResource();
+    PersistenceResource persistenceResource = resourceManagerMapper.getResourceById(1);
+    assertNotNull(persistenceResource);
+  }
+}
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/persistence/PersistenceTest.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/persistence/PersistenceTest.java
deleted file mode 100644
index 41d4197866d..00000000000
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/persistence/PersistenceTest.java
+++ /dev/null
@@ -1,303 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.manager.persistence;
-
-import org.apache.linkis.common.ServiceInstance;
-import org.apache.linkis.manager.Scan;
-import org.apache.linkis.manager.common.entity.label.LabelKeyValue;
-import org.apache.linkis.manager.common.entity.persistence.PersistenceLabel;
-import org.apache.linkis.manager.common.entity.persistence.PersistenceResource;
-import org.apache.linkis.manager.dao.LabelManagerMapper;
-import org.apache.linkis.manager.entity.Tunple;
-import org.apache.linkis.manager.label.entity.Label;
-
-import org.apache.commons.beanutils.BeanUtils;
-
-import org.springframework.context.annotation.AnnotationConfigApplicationContext;
-
-import java.lang.reflect.InvocationTargetException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-public class PersistenceTest {
-
-  AnnotationConfigApplicationContext context = null;
-
-  LabelManagerMapper labelManagerMapper = null;
-
-  ResourceLabelPersistence resourceLabelPersistence = null;
-
-  @BeforeEach
-  public void before() {
-    context = new AnnotationConfigApplicationContext(Scan.class);
-    labelManagerMapper = context.getBean(LabelManagerMapper.class);
-    resourceLabelPersistence = context.getBean(ResourceLabelPersistence.class);
-  }
-
-  @Test
-  public void test01() throws InvocationTargetException, IllegalAccessException {
-
-    PersistenceLabel persistenceLabel1 = new PersistenceLabel();
-    persistenceLabel1.setLabelKey("engineType");
-    persistenceLabel1.setValue(Collections.singletonMap("runType", "spark"));
-    System.out.println(persistenceLabel1.getStringValue());
-    PersistenceLabel persistenceLabel2 = new PersistenceLabel();
-    persistenceLabel2.setLabelKey("engineType");
-    persistenceLabel2.setValue(Collections.singletonMap("runType", "hive"));
-    System.out.println(persistenceLabel2.getStringValue());
-    List<PersistenceLabel> persistenceLabels = Arrays.asList(persistenceLabel1);
-    List<Map<String, Object>> nodeRelationsByLabels =
-        labelManagerMapper.dimListNodeRelationsByKeyValueMap(
-            Collections.singletonMap(persistenceLabel1.getLabelKey(), persistenceLabel1.getValue()),
-            Label.ValueRelation.ALL.name());
-    List<Tunple<PersistenceLabel, ServiceInstance>> arrays =
-        new ArrayList<Tunple<PersistenceLabel, ServiceInstance>>();
-    for (Map<String, Object> nodeRelationsByLabel : nodeRelationsByLabels) {
-      ServiceInstance serviceInstance = new ServiceInstance();
-      PersistenceLabel persistenceLabel = new PersistenceLabel();
-      BeanUtils.populate(serviceInstance, nodeRelationsByLabel);
-      BeanUtils.populate(persistenceLabel, nodeRelationsByLabel);
-      arrays.add(new Tunple<>(persistenceLabel, serviceInstance));
-    }
-    Map<PersistenceLabel, List<ServiceInstance>> collect =
-        arrays.stream()
-            .collect(Collectors.groupingBy(Tunple::getKey))
-            .entrySet()
-            .stream()
-            .collect(
-                Collectors.toMap(
-                    Map.Entry::getKey,
-                    f -> f.getValue().stream().map(Tunple::getValue).collect(Collectors.toList())));
-    System.out.println(collect);
-  }
-
-  @Test
-  public void test02() throws InvocationTargetException, IllegalAccessException {
-
-    List<ServiceInstance> list =
-        Arrays.asList(
-            ServiceInstance.apply("aaa", "localhost:8088"),
-            ServiceInstance.apply("bbb", "localhost:8089"));
-    List<Map<String, Object>> nodeRelationsByLabels =
-        labelManagerMapper.listLabelRelationByServiceInstance(list);
-    List<Tunple<ServiceInstance, PersistenceLabel>> arrays = new ArrayList<>();
-    for (Map<String, Object> nodeRelationsByLabel : nodeRelationsByLabels) {
-      ServiceInstance serviceInstance = new ServiceInstance();
-      PersistenceLabel persistenceLabel = new PersistenceLabel();
-      BeanUtils.populate(serviceInstance, nodeRelationsByLabel);
-      BeanUtils.populate(persistenceLabel, nodeRelationsByLabel);
-      arrays.add(new Tunple<>(serviceInstance, persistenceLabel));
-    }
-    Map<ServiceInstance, List<PersistenceLabel>> collect =
-        arrays.stream()
-            .collect(Collectors.groupingBy(Tunple::getKey))
-            .entrySet()
-            .stream()
-            .collect(
-                Collectors.toMap(
-                    Map.Entry::getKey,
-                    f -> f.getValue().stream().map(Tunple::getValue).collect(Collectors.toList())));
-    System.out.println(collect);
-  }
-
-  @Test
-  public void test03() throws InvocationTargetException, IllegalAccessException {
-
-    List nodeByLabelKeyValue =
-        labelManagerMapper.getNodeByLabelKeyValue("serverAlias", "em");
-    System.out.println(nodeByLabelKeyValue);
-  }
-
-  @Test
-  public void testListResourceLabelByValueList()
-      throws InvocationTargetException, IllegalAccessException {
-    /*SELECT l.*,lvr.*
-    FROM linkis_cg_manager_label l ,linkis_cg_manager_label_resource lr ,linkis_cg_manager_label_value_relation lvr
-    WHERE l.id = lr.label_id AND l.id = lvr.label_id
-    AND (lvr.label_value_key,lvr.label_value_content) IN (('alias','em'),('key2','value2'),("instance","localhost:9000"),("serviceName","sparkEngine"))
-    GROUP BY l.id HAVING COUNT(1) = l.label_value_size;*/
-    ArrayList<LabelKeyValue> labelKeyValues = new ArrayList<>();
-    labelKeyValues.add(new LabelKeyValue("alias", "em"));
-    labelKeyValues.add(new LabelKeyValue("key2", "value2"));
-    List<PersistenceLabel> persistenceLabels =
-        labelManagerMapper.listResourceLabelByValues(labelKeyValues);
-    System.out.println(persistenceLabels);
-  }
-
-  @Test
-  public void testListResourceLabelByKeyValuesMaps()
-      throws InvocationTargetException, IllegalAccessException {
-    HashMap<String, Map<String, String>> labelKeyValues = new HashMap<>();
-    HashMap<String, String> key1 = new HashMap<>();
-    key1.put("instance", "localhost:9026");
-    key1.put("serviceName", "linkis-engineManager");
-    labelKeyValues.put("emInstance", key1);
-    HashMap<String, String> key2 = new HashMap<>();
-    key2.put("instance", "localhost:9000");
-    key2.put("serviceName", "sparkEngine");
-    labelKeyValues.put("engineInstance", key2);
-    List<PersistenceLabel> persistenceLabels =
-        labelManagerMapper.dimlistResourceLabelByKeyValueMap(
-            labelKeyValues, Label.ValueRelation.ALL.name());
-    System.out.println(persistenceLabels);
-  }
-
-  @Test
-  public void testGetResourceByLaBelId() {
-    List<PersistenceResource> persistenceResources = labelManagerMapper.listResourceByLaBelId(1);
-    System.out.println(persistenceResources);
-  }
-
-  @Test
-  public void testGetResourceByKeyValuesMaps() {
-    HashMap<String, Map<String, String>> labelKeyValues = new HashMap<>();
-    HashMap<String, String> key1 = new HashMap<>();
-    key1.put("alias", "em");
-    key1.put("key2", "value2");
-    labelKeyValues.put("serverAlias", key1);
-    HashMap<String, String> key2 = new HashMap<>();
-    key2.put("instance", "localhost:9000");
-    key2.put("serviceName", "sparkEngine");
-    labelKeyValues.put("engineInstance", key2);
-    List<PersistenceLabel> persistenceLabels =
-        labelManagerMapper.dimlistResourceLabelByKeyValueMap(
-            labelKeyValues, Label.ValueRelation.ALL.name());
-    System.out.println(persistenceLabels);
-  }
-
-  @Test
-  public void testDeleteResourceByLabelId() {
-    labelManagerMapper.deleteResourceByLabelId(1);
-  }
-
-  @Test
-  public void testDeleteResourceByLabelKeyValuesMaps() {
-    HashMap<String, Map<String, String>> labelKeyValues = new HashMap<>();
-    HashMap<String, String> key1 = new HashMap<>();
-    key1.put("alias", "em");
-    key1.put("key2", "value2");
-    labelKeyValues.put("serverAlias", key1);
-  }
-
-  @Test
-  public void testBatchDeleteResourceByLabelId() {
-    labelManagerMapper.batchDeleteResourceByLabelId(Arrays.asList(3, 4));
-    labelManagerMapper.batchDeleteResourceByLabelIdInDirect(Arrays.asList(3, 4));
-  }
-
-  @Test
-  public void testBatchDeleteResourceByLabelKeyValuesMaps() {
-    HashMap<String, Map<String, String>> labelKeyValues = new HashMap<>();
-    HashMap<String, String> key1 = new HashMap<>();
-    key1.put("alias", "em");
-    key1.put("key2", "value2");
-    labelKeyValues.put("serverAlias", key1);
-    HashMap<String, String> key2 = new HashMap<>();
-    key2.put("instance", "localhost:9000");
-    key2.put("serviceName", "sparkEngine");
-    labelKeyValues.put("engineInstance", key2);
-    labelManagerMapper.batchDeleteResourceByLabelKeyValuesMaps(labelKeyValues);
-  }
-
-  @Test
-  public void testSetResource01() {
-    HashMap<String, Map<String, String>> labelKeyValues = new HashMap<>();
-    HashMap<String, String> key1 = new HashMap<>();
-    key1.put("alias", "em");
-    key1.put("key2", "value2");
-    labelKeyValues.put("serverAlias", key1);
-    HashMap<String, String> key2 = new HashMap<>();
-    key2.put("instance", "localhost:9000");
-    key2.put("serviceName", "sparkEngine");
-    labelKeyValues.put("engineInstance", key2);
-    PersistenceResource persistenceResource = new PersistenceResource();
-    persistenceResource.setMaxResource("100");
-    PersistenceLabel persistenceLabel = new PersistenceLabel();
-    persistenceLabel.setId(3);
-    resourceLabelPersistence.setResourceToLabel(persistenceLabel, persistenceResource);
-  }
-
-  @Test
-  public void testSetResource02() {
-    HashMap<String, Map<String, String>> labelKeyValues = new HashMap<>();
-    HashMap<String, String> key1 = new HashMap<>();
-    key1.put("alias", "em");
-    key1.put("key2", "value2");
-    labelKeyValues.put("serverAlias", key1);
-    HashMap<String, String> key2 = new HashMap<>();
-    key2.put("instance", "localhost:9000");
-    key2.put("serviceName", "sparkEngine");
-    labelKeyValues.put("engineInstance", key2);
-    PersistenceResource persistenceResource = new PersistenceResource();
-    persistenceResource.setMaxResource("300");
-    PersistenceLabel persistenceLabel = new PersistenceLabel();
-    persistenceLabel.setValue(key1);
-    persistenceLabel.setLabelKey("serverAlias");
-    resourceLabelPersistence.setResourceToLabel(persistenceLabel, persistenceResource);
-  }
-
-  @Test
-  public void testdimListLabelByValueList()
-      throws InvocationTargetException, IllegalAccessException {
-    HashMap<String, String> stringStringHashMap = new HashMap<>();
-    stringStringHashMap.put("type", "spark");
-    // stringStringHashMap.put("version","2.4.3");
-    stringStringHashMap.put("aaa", "bbb");
-    List<PersistenceLabel> persistenceLabels =
-        labelManagerMapper.dimListLabelByValueList(
-            Arrays.asList(stringStringHashMap), Label.ValueRelation.OR.name());
-    System.out.println(persistenceLabels.size());
-  }
-
-  @Test
-  public void testdimListLabelsByKeyValueMap()
-      throws InvocationTargetException, IllegalAccessException {
-    HashMap<String, String> stringStringHashMap = new HashMap<>();
-    stringStringHashMap.put("type", "spark");
-    // stringStringHashMap.put("version","2.4.3");
-    stringStringHashMap.put("aaa", "bbb");
-    List<PersistenceLabel> persistenceLabels =
-        labelManagerMapper.dimListLabelByKeyValueMap(
-            Collections.singletonMap("combined_xx_xx", stringStringHashMap),
-            Label.ValueRelation.AND.name());
-    System.out.println(persistenceLabels.size());
-  }
-
-  @Test
-  public void testListLabelByKeyValueMap()
-      throws InvocationTargetException, IllegalAccessException {
-    HashMap<String, String> stringStringHashMap = new HashMap<>();
-    stringStringHashMap.put("type", "spark");
-    stringStringHashMap.put("version", "2.4.3");
-    stringStringHashMap.put("aaa", "bbb");
-    stringStringHashMap.put("ccc", "ddd");
-    stringStringHashMap.put("eee", "fff");
-    List<PersistenceLabel> persistenceLabels =
-        labelManagerMapper.listLabelByKeyValueMap(
-            Collections.singletonMap("combined_xx_xx", stringStringHashMap));
-    System.out.println(persistenceLabels.size());
-  }
-}
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/resources/application.properties b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/resources/application.properties
new file mode 100644
index 00000000000..ca7d27a236a
--- /dev/null
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/resources/application.properties
@@ -0,0 +1,41 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+
+#h2 database config
+spring.datasource.driver-class-name=org.h2.Driver
+#init
+spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript from 'classpath:create.sql'
+spring.datasource.username=sa
+spring.datasource.password=
+spring.datasource.hikari.connection-test-query=select 1
+spring.datasource.hikari.minimum-idle=5
+spring.datasource.hikari.auto-commit=true
+spring.datasource.hikari.validation-timeout=3000
+spring.datasource.hikari.pool-name=linkis-test
+spring.datasource.hikari.maximum-pool-size=50
+spring.datasource.hikari.connection-timeout=30000
+spring.datasource.hikari.idle-timeout=600000
+spring.datasource.hikari.leak-detection-threshold=0
+spring.datasource.hikari.initialization-fail-timeout=1
+
+mybatis-plus.mapper-locations=org/apache/linkis/manager/dao/impl/*.xml
+mybatis-plus.configuration.log-impl=org.apache.ibatis.logging.stdout.StdOutImpl
+
+#disable eureka discovery client
+spring.cloud.service-registry.auto-registration.enabled=false
+eureka.client.enabled=false
+eureka.client.serviceUrl.registerWithEureka=false
\ No newline at end of file
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/resources/create.sql b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/resources/create.sql
new file mode 100644
index 00000000000..8e31933025d
--- /dev/null
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/resources/create.sql
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+CREATE TABLE `linkis_cg_manager_linkis_resources` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `max_resource` varchar(1020) DEFAULT NULL,
+  `min_resource` varchar(1020) DEFAULT NULL,
+  `used_resource` varchar(1020) DEFAULT NULL,
+  `left_resource` varchar(1020) DEFAULT NULL,
+  `expected_resource` varchar(1020) DEFAULT NULL,
+  `locked_resource` varchar(1020) DEFAULT NULL,
+  `resourceType` varchar(255) DEFAULT NULL,
+  `ticketId` varchar(255) DEFAULT NULL,
+  `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `updator` varchar(255) DEFAULT NULL,
+  `creator` varchar(255) DEFAULT NULL,
+  PRIMARY KEY (`id`)
+);
+
+
+CREATE TABLE `linkis_cg_manager_label_resource` (
+  `id` int(20) NOT NULL AUTO_INCREMENT,
+  `label_id` int(20) DEFAULT NULL,
+  `resource_id` int(20) DEFAULT NULL,
+  `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (`id`)
+);
+
+INSERT INTO linkis_cg_manager_label_resource (label_id, resource_id, update_time, create_time) VALUES(2, 1, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP);
+
+CREATE TABLE `linkis_cg_manager_label_service_instance` (
+  `id` int(20) NOT NULL AUTO_INCREMENT,
+  `label_id` int(20) DEFAULT NULL,
+  `service_instance` varchar(128) DEFAULT NULL,
+  `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (`id`)
+);
+
+INSERT INTO linkis_cg_manager_label_service_instance(label_id, service_instance, update_time, create_time) VALUES (2, 'instance1', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP);
+
+CREATE TABLE `linkis_cg_manager_label_user` (
+  `id` int(20) NOT NULL AUTO_INCREMENT,
+  `username` varchar(255) DEFAULT NULL,
+  `label_id` int(20) DEFAULT NULL,
+  `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (`id`)
+) ;
+
+INSERT INTO linkis_cg_manager_label_user(username, label_id, update_time, create_time)VALUES('testname', 2, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP);
+
+CREATE TABLE `linkis_cg_manager_label` (
+  `id` int(20) NOT NULL AUTO_INCREMENT,
+  `label_key` varchar(32) NOT NULL,
+  `label_value` varchar(255) NOT NULL,
+  `label_feature` varchar(16) NOT NULL,
+  `label_value_size` int(20) NOT NULL,
+  `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (`id`)
+);
+INSERT INTO linkis_cg_manager_label (id,label_key,label_value,label_feature,label_value_size,update_time,create_time) VALUES (2,'combined_userCreator_engineType','*-LINKISCLI,*-*','OPTIONAL',2,'2022-03-28 01:31:08.0','2022-03-28 01:31:08.0');
+
+CREATE TABLE `linkis_cg_manager_service_instance_metrics` (
+  `instance` varchar(128) NOT NULL,
+  `instance_status` int(11) DEFAULT NULL,
+  `overload` varchar(255) DEFAULT NULL,
+  `heartbeat_msg` text ,
+  `healthy_status` varchar(255) DEFAULT NULL,
+  `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (`instance`)
+) ;
+
+CREATE TABLE `linkis_cg_manager_service_instance` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `instance` varchar(128) DEFAULT NULL,
+  `name` varchar(32) DEFAULT NULL,
+  `owner` varchar(32) DEFAULT NULL,
+  `mark` varchar(32) DEFAULT NULL,
+  `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `updator` varchar(32) DEFAULT NULL,
+  `creator` varchar(32) DEFAULT NULL,
+  PRIMARY KEY (`id`)
+);
+INSERT INTO linkis_cg_manager_service_instance(`instance`, name, owner, mark, update_time, create_time, updator, creator)VALUES('instance1', 'testname', 'testowner', 'testmark', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 'testupdator', 'testcreator');
+
+CREATE TABLE `linkis_cg_manager_engine_em` (
+  `id` int(20) NOT NULL AUTO_INCREMENT,
+  `engine_instance` varchar(128) DEFAULT NULL,
+  `em_instance` varchar(128) DEFAULT NULL,
+  `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (`id`)
+) ;
+
+INSERT INTO linkis_cg_manager_engine_em (engine_instance,em_instance,update_time,create_time) VALUES ('instance1','instance1','2022-02-26 14:54:05.0','2022-02-26 14:54:05.0');
+
+
+CREATE TABLE `linkis_cg_manager_lock` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `lock_object` varchar(255) DEFAULT NULL,
+  `time_out` longtext ,
+  `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (`id`)
+);
+INSERT INTO linkis_cg_manager_lock(lock_object, time_out, update_time, create_time)VALUES('testjson', '1', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP);
\ No newline at end of file
diff --git a/linkis-computation-governance/linkis-manager/pom.xml b/linkis-computation-governance/linkis-manager/pom.xml
index 031da4782ea..a3f147047d9 100644
--- a/linkis-computation-governance/linkis-manager/pom.xml
+++ b/linkis-computation-governance/linkis-manager/pom.xml
@@ -21,6 +21,7 @@
     <groupId>org.apache.linkis</groupId>
     <artifactId>linkis</artifactId>
     <version>1.3.0</version>
+    <relativePath>../../pom.xml</relativePath>
   </parent>
   <artifactId>linkis-manager</artifactId>
@@ -33,6 +34,19 @@
     <module>linkis-application-manager</module>
   </modules>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.linkis</groupId>
+      <artifactId>linkis-engineconn-plugin-server</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.linkis</groupId>
+      <artifactId>linkis-application-manager</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+
   <build>
     <finalName>${project.artifactId}-${project.version}</finalName>
@@ -53,13 +67,34 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-jar-plugin</artifactId>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <inherited>false</inherited>
-
-
-
-
-
+        <configuration>
+          <skipAssembly>false</skipAssembly>
+          <finalName>out</finalName>
+          <appendAssemblyId>false</appendAssemblyId>
+          <attach>false</attach>
+          <descriptors>
+            <descriptor>distribution.xml</descriptor>
+          </descriptors>
+        </configuration>
+        <executions>
+          <execution>
+            <id>make-assembly</id>
+            <goals>
+              <goal>single</goal>
+            </goals>
+            <phase>package</phase>
+            <configuration>
+              <descriptors>
+                <descriptor>distribution.xml</descriptor>
+              </descriptors>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
diff --git a/linkis-dist/bin/checkEnv.sh b/linkis-dist/bin/checkEnv.sh
index 1acd53f986a..0af9b12d671 100644
--- a/linkis-dist/bin/checkEnv.sh
+++ b/linkis-dist/bin/checkEnv.sh
@@ -124,24 +124,12 @@ check_service_port
 SERVER_PORT=$ENGINECONNMANAGER_PORT
 check_service_port
 
-SERVER_PORT=$ENGINECONN_PLUGIN_SERVER_PORT
-check_service_port
-
 SERVER_PORT=$ENTRANCE_PORT
 check_service_port
 
 SERVER_PORT=$PUBLICSERVICE_PORT
 check_service_port
 
-SERVER_PORT=$CS_PORT
-check_service_port
-
-SERVER_PORT=$DATASOURCE_MANAGER_PORT
-check_service_port
-
-SERVER_PORT=$METADATA_QUERY_PORT
-check_service_port
-
 if [ "$portIsOccupy" = true ];then
     echo "The port is already in use, please check before installing"
     exit 1
diff --git a/linkis-dist/bin/common.sh b/linkis-dist/bin/common.sh
index f6f2f7a2435..be1717156f7 100644
--- a/linkis-dist/bin/common.sh
+++ b/linkis-dist/bin/common.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with
diff --git a/linkis-dist/bin/install-linkis-to-kubernetes.sh b/linkis-dist/bin/install-linkis-to-kubernetes.sh
new file mode 100644
index 00000000000..dbc54de7008
--- /dev/null
+++ b/linkis-dist/bin/install-linkis-to-kubernetes.sh
@@ -0,0 +1,215 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#variables
+WORK_DIR=`cd $(dirname $0); pwd -P`
+ROOT_DIR=${WORK_DIR}/..
+MIRRORS="ghcr.io"
+TAG="latest"
+COMMAND="pull-install"
+DEBUG=false
+WITH_LDH=false
+
+
+debug(){
+    if [ "$DEBUG" = true ]; then
+        echo $(date "+%Y-%m-%d %H:%M:%S") "debug: "$1
+    fi
+}
+
+info(){
+    echo $(date "+%Y-%m-%d %H:%M:%S") "info: "$1
+}
+
+
+#help info
+help() {
+    echo "Command       Describe"
+    echo "pull-install  pull linkis images and install linkis"
+    echo "install       install linkis to kubernetes using the local images"
+    echo "pull          pull linkis images"
+    echo "reset         delete the kind cluster test-helm"
+    echo "help          print this help info"
+    echo ""
+    echo "Params        Describe"
+    echo "-m            mirror url (default: ghcr.io, eg: ghcr.dockerproxy.com)"
+    echo "-d            print debug info"
+    echo "-l            install linkis with ldh"
+    echo "-t            tag name"
+    echo ""
+    echo "example:"
+    echo "./install-linkis-to-kubernetes.sh pull                                    pull images from ghcr.io"
+    echo "./install-linkis-to-kubernetes.sh pull -t latest                          pull images with the given tag"
+    echo "./install-linkis-to-kubernetes.sh pull -m ghcr.dockerproxy.com            pull images from ghcr.dockerproxy.com or ghcr.nju.edu.cn"
+    echo "./install-linkis-to-kubernetes.sh install -l -m ghcr.dockerproxy.com      install linkis with ldh to a kind kubernetes cluster"
+    echo "./install-linkis-to-kubernetes.sh pull-install -m ghcr.dockerproxy.com    pull images and install linkis to a kind kubernetes cluster"
+}
+
+#pull the linkis container images
+pull(){
+    debug ${MIRRORS}/apache/incubator-linkis/linkis-ldh:${TAG}
+    docker pull ${MIRRORS}/apache/incubator-linkis/linkis-ldh:${TAG}
+    debug ${MIRRORS}/apache/incubator-linkis/linkis:${TAG}
+    docker pull ${MIRRORS}/apache/incubator-linkis/linkis:${TAG}
+    debug ${MIRRORS}/apache/incubator-linkis/linkis-web:${TAG}
+    docker pull ${MIRRORS}/apache/incubator-linkis/linkis-web:${TAG}
+}
+#re-tag the pulled images as dev
+tag(){
+    docker tag ${MIRRORS}/apache/incubator-linkis/linkis:${TAG} linkis:dev
+    docker tag ${MIRRORS}/apache/incubator-linkis/linkis-web:${TAG} linkis-web:dev
+    docker tag ${MIRRORS}/apache/incubator-linkis/linkis-ldh:${TAG} linkis-ldh:dev
+}
+#build a linkis image that bundles the mysql jdbc driver
+make_linikis_image_with_mysql_jdbc(){
+    ${ROOT_DIR}/docker/scripts/make-linikis-image-with-mysql-jdbc.sh
+    docker tag linkis:with-jdbc linkis:dev
+}
+#create a kind cluster
+create_kind_cluster(){
+    ${ROOT_DIR}/helm/scripts/create-kind-cluster.sh
+}
+#install mysql
+install_mysql(){
+    ${ROOT_DIR}/helm/scripts/install-mysql.sh
+}
+#install ldh
+install_ldh(){
+    ${ROOT_DIR}/helm/scripts/install-ldh.sh
+}
+#install linkis
+install_linkis(){
+    if [ "$WITH_LDH" = true ];then
+        ${ROOT_DIR}/helm/scripts/install-charts-with-ldh.sh
+    else
+        ${ROOT_DIR}/helm/scripts/install-linkis.sh
+    fi
+}
+#display pods
+display_pods(){
+    kubectl get pods -A
+}
+
+install(){
+    tag
+    make_linikis_image_with_mysql_jdbc
+    create_kind_cluster
+    install_mysql
+    install_ldh
+    install_linkis
+    display_pods
+}
+
+reset(){
+    kind delete clusters test-helm
+}
+
+
+check_docker(){
+    docker -v >> /dev/null 2>&1
+    if [ $? -ne 0 ]; then
+        echo "Docker is not installed!"
+        exit 1
+    fi
+}
+
+check_kind(){
+    kind --version >> /dev/null 2>&1
+    if [ $? -ne 0 ]; then
+        echo "kind is not installed!"
+        exit 1
+    fi
+}
+
+check_kubectl(){
+    kubectl >> /dev/null 2>&1
+    if [ $? -ne 0 ]; then
+        echo "kubectl is not installed!"
+        exit 1
+    fi
+}
+
+check_helm(){
+    helm version >> /dev/null 2>&1
+    if [ $? -ne 0 ]; then
+        echo "helm is not installed!"
+        exit 1
+    fi
+}
+
+
+debug $WORK_DIR
+
+#program entry point
+main(){
+
+    #environment checks
+    check_docker
+    check_kind
+    check_kubectl
+    check_helm
+
+    #argument parsing
+    long_opts="debug,mirrors:"
+    getopt_cmd=$(getopt -o dm:lt: -n $(basename $0) -- "$@") || \
+        { echo -e "\nERROR: Getopt failed. Extra args\n"; exit 1;}
+
+    eval set -- "$getopt_cmd"
+    while true; do
+        case "$1" in
+            -d) DEBUG=true;;
+            -m) MIRRORS=$2;;
+            -l) WITH_LDH=true;;
+            -t) TAG=$2;;
+            --) shift; break;;
+        esac
+        shift
+    done
+
+    debug "params num:"$#
+
+    #command parsing
+    if [ $# -eq 0 ]; then
+        COMMAND="pull-install"
+    else
+        COMMAND=$1
+    fi
+
+    debug "command is:"$COMMAND
+
+    if [ "$COMMAND" = "pull-install" ]; then
+        pull
+        install
+    fi
+
+    if [ "$COMMAND" = "install" ]; then
+        install
+    fi
+
+    if [ "$COMMAND" = "pull" ]; then
+        pull
+    fi
+
+    if [ "$COMMAND" = "reset" ]; then
+        reset
+    fi
+
+    if [ "$COMMAND" = "help" ]; then
+        help
+    fi
+}
+
+main "$@"
diff --git a/linkis-dist/bin/install.sh b/linkis-dist/bin/install.sh
index 4154d62bfbf..5b4f28cf1ca 100644
--- a/linkis-dist/bin/install.sh
+++ b/linkis-dist/bin/install.sh
@@ -364,12 +364,6 @@ then
     sed -i ${txt} "s#spring.server.port.*#spring.server.port=$ENGINECONNMANAGER_PORT#g" $ecm_conf
 fi
 
-ecp_conf=$LINKIS_HOME/conf/linkis-cg-engineplugin.properties
-if [ "$ENGINECONN_PLUGIN_SERVER_PORT" != "" ]
-then
-    sed -i ${txt} "s#spring.server.port.*#spring.server.port=$ENGINECONN_PLUGIN_SERVER_PORT#g" $ecp_conf
-fi
-
 entrance_conf=$LINKIS_HOME/conf/linkis-cg-entrance.properties
 if [ "$ENTRANCE_PORT" != "" ]
 then
@@ -386,44 +380,21 @@ then
     sed -i ${txt} "s#spring.server.port.*#spring.server.port=$PUBLICSERVICE_PORT#g" $publicservice_conf
 fi
 
-metadataquery_conf=$LINKIS_HOME/conf/linkis-ps-metadataquery.properties
-if [ "$METADATA_QUERY_PORT" != "" ]
-then
-    sed -i ${txt} "s#spring.server.port.*#spring.server.port=$METADATA_QUERY_PORT#g" $metadataquery_conf
-fi
-
-
-##datasource
-datasource_conf=$LINKIS_HOME/conf/linkis-ps-data-source-manager.properties
-if [ "$DATASOURCE_MANAGER_PORT" != "" ]
-then
-    sed -i ${txt} "s#spring.server.port.*#spring.server.port=$DATASOURCE_MANAGER_PORT#g" $datasource_conf
-fi
-
-echo "update conf $datasource_conf"
+echo "update conf $publicservice_conf"
 
 if [ "$HIVE_META_URL" != "" ]
 then
-    sed -i ${txt} "s#hive.meta.url.*#hive.meta.url=$HIVE_META_URL#g" $datasource_conf
     sed -i ${txt} "s#hive.meta.url.*#hive.meta.url=$HIVE_META_URL#g" $publicservice_conf
 fi
 if [ "$HIVE_META_USER" != "" ]
 then
-    sed -i ${txt} "s#hive.meta.user.*#hive.meta.user=$HIVE_META_USER#g" $datasource_conf
     sed -i ${txt} "s#hive.meta.user.*#hive.meta.user=$HIVE_META_USER#g" $publicservice_conf
 fi
 if [ "$HIVE_META_PASSWORD" != "" ]
 then
     HIVE_META_PASSWORD=$(echo ${HIVE_META_PASSWORD//'#'/'\#'})
-    sed -i ${txt} "s#hive.meta.password.*#hive.meta.password=$HIVE_META_PASSWORD#g" $datasource_conf
     sed -i ${txt} "s#hive.meta.password.*#hive.meta.password=$HIVE_META_PASSWORD#g" $publicservice_conf
 fi
 
-cs_conf=$LINKIS_HOME/conf/linkis-ps-cs.properties
-if [ "$CS_PORT" != "" ]
-then
-    sed -i ${txt} "s#spring.server.port.*#spring.server.port=$CS_PORT#g" $cs_conf
-fi
-
 ##Enable prometheus for monitoring
 if [ "true" == "$PROMETHEUS_ENABLE" ]
 then
diff --git a/linkis-dist/deploy-config/db.sh b/linkis-dist/deploy-config/db.sh
index 2da3d5f6eb0..64cbb167da7 100644
--- a/linkis-dist/deploy-config/db.sh
+++ b/linkis-dist/deploy-config/db.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with
diff --git a/linkis-dist/deploy-config/linkis-env.sh b/linkis-dist/deploy-config/linkis-env.sh
index 857fca76f3c..7cc3610f3a9 100644
--- a/linkis-dist/deploy-config/linkis-env.sh
+++ b/linkis-dist/deploy-config/linkis-env.sh
@@ -124,11 +124,6 @@ MANAGER_PORT=9101
 #ENGINECONNMANAGER_INSTALL_IP=127.0.0.1
 ENGINECONNMANAGER_PORT=9102
-
-##linkis-cg-engineplugin
-#ENGINECONN_PLUGIN_SERVER_INSTALL_IP=127.0.0.1
-ENGINECONN_PLUGIN_SERVER_PORT=9103
-
 ##linkis-cg-entrance
 #ENTRANCE_INSTALL_IP=127.0.0.1
 ENTRANCE_PORT=9104
@@ -137,20 +132,6 @@ ENTRANCE_PORT=9104
 #PUBLICSERVICE_INSTALL_IP=127.0.0.1
 PUBLICSERVICE_PORT=9105
 
-##linkis-ps-cs
-#CS_INSTALL_IP=127.0.0.1
-CS_PORT=9108
-
-
-##linkis-ps-data-source-manager
-#DATASOURCE_MANAGER_INSTALL_IP=127.0.0.1
-DATASOURCE_MANAGER_PORT=9109
-
-##linkis-ps-metadataquery
-#METADATA_QUERY_INSTALL_IP=127.0.0.1
-METADATA_QUERY_PORT=9110
-
-
 ########################################################################################
 ## LDAP is for enterprise authorization, if you just want to have a try, ignore it.
@@ -164,6 +145,9 @@ export SERVER_HEAP_SIZE="512M"
 ##The decompression directory and the installation directory need to be inconsistent
 #LINKIS_HOME=/appcom/Install/LinkisInstall
 
+##The extended lib, such as mysql-connector-java-*.jar
+#LINKIS_EXTENDED_LIB=/appcom/common/linkisExtendedLib
+
 LINKIS_VERSION=1.3.0
 
 # for install
@@ -175,9 +159,6 @@ LINKIS_PUBLIC_MODULE=lib/linkis-commons/public-module
 ##If you want to enable prometheus for monitoring linkis, you can set this export PROMETHEUS_ENABLE=true
 export PROMETHEUS_ENABLE=false
 
-#If you want to start metadata related microservices, you can set this export ENABLE_METADATA_MANAGE=true
-export ENABLE_METADATA_QUERY=true
-
 #If you only want to experience linkis streamlined services, not rely on hdfs
 #you can set the following configuration to false and for the configuration related to the file directory,
 #use path mode of [file://] to replace [hdfs://]
diff --git a/linkis-dist/docker/ldh.Dockerfile b/linkis-dist/docker/ldh.Dockerfile
index 87f164404ad..3b2073f2916 100644
--- a/linkis-dist/docker/ldh.Dockerfile
+++ b/linkis-dist/docker/ldh.Dockerfile
@@ -25,7 +25,7 @@ FROM ${IMAGE_BASE} as linkis-ldh
 
 ARG JDK_VERSION=1.8.0-openjdk
 ARG JDK_BUILD_REVISION=1.8.0.332.b09-1.el7_9
-ARG MYSQL_JDBC_VERSION=5.1.49
+ARG MYSQL_JDBC_VERSION=8.0.28
 
 ARG HADOOP_VERSION=2.7.2
 ARG HIVE_VERSION=2.3.3
@@ -38,13 +38,23 @@ ARG LINKIS_VERSION=0.0.0
 
 RUN useradd -r -s /bin/bash -u 100001 -g root -G wheel hadoop
 
+# if you want to use a specific yum repos conf file, you can put it at linkis-dist/docker/CentOS-Base.repo
+# and add [COPY apache-linkis-*-incubating-bin/docker/CentOS-Epel.repo /etc/yum.repos.d/CentOS-Epel.repo]
+
+# TODO: remove the mysql client installation when schema-init-tools is ready
 RUN yum install -y \
-        vim unzip curl sudo krb5-workstation sssd crontabs net-tools python-pip \
+        less vim unzip curl sudo krb5-workstation sssd crontabs net-tools python-pip glibc-common \
        java-${JDK_VERSION}-${JDK_BUILD_REVISION} \
        java-${JDK_VERSION}-devel-${JDK_BUILD_REVISION} \
        mysql \
    && yum clean all
 
+RUN cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
+    && localedef -c -f UTF-8 -i en_US en_US.UTF-8
+ENV LANG=en_US.UTF-8 LANGUAGE=en_US:zh LC_TIME=en_US.UTF-8
+ENV TZ="Asia/Shanghai"
+
+
 RUN sed -i "s#^%wheel.*#%wheel ALL=(ALL) NOPASSWD: ALL#g" /etc/sudoers
 
 RUN mkdir -p /opt/ldh/${LINKIS_VERSION} \
@@ -70,9 +80,9 @@ RUN mkdir -p /etc/ldh \
    && ln -s /opt/ldh/${LINKIS_VERSION}/flink-${FLINK_VERSION} /opt/ldh/current/flink \
    && ln -s /opt/ldh/${LINKIS_VERSION}/apache-zookeeper-${ZOOKEEPER_VERSION}-bin /opt/ldh/current/zookeeper
 
-RUN curl -L -o /opt/ldh/current/hive/lib/mysql-connector-java-${MYSQL_JDBC_VERSION}.jar \
-        https://repo1.maven.org/maven2/mysql/mysql-connector-java/${MYSQL_JDBC_VERSION}/mysql-connector-java-${MYSQL_JDBC_VERSION}.jar \
-    && cp /opt/ldh/current/hive/lib/mysql-connector-java-${MYSQL_JDBC_VERSION}.jar /opt/ldh/current/spark/jars/
+# add the jdbc driver after the soft links are created
+#ADD ldh-tars/mysql-connector-java-${MYSQL_JDBC_VERSION}.jar /opt/ldh/current/hive/lib/
+#ADD ldh-tars/mysql-connector-java-${MYSQL_JDBC_VERSION}.jar /opt/ldh/current/spark/jars/
 
 ENV JAVA_HOME /etc/alternatives/jre
 ENV PATH /opt/ldh/current/hadoop/bin:/opt/ldh/current/hive/bin:/opt/ldh/current/spark/bin:/opt/ldh/current/flink/bin:/opt/ldh/current/zookeeper/bin:$PATH
diff --git a/linkis-dist/docker/licenses/LICENSE-apache-hive-2.3.3-bin.txt b/linkis-dist/docker/licenses/LICENSE-apache-hive-2.3.3-bin.txt
new file mode 100644
index 00000000000..aa88fe5eedc
--- /dev/null
+++ 
b/linkis-dist/docker/licenses/LICENSE-apache-hive-2.3.3-bin.txt @@ -0,0 +1,407 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +APACHE HIVE SUBCOMPONENTS: + +The Apache Hive project contains subcomponents with separate copyright +notices and license terms. Your use of the source code for the these +subcomponents is subject to the terms and conditions of the following +licenses. + + +For the SQLLine package: + +Copyright (c) 2002, 2003, 2004, 2005 Marc Prud'hommeaux + +From: http://sqlline.sourceforge.net/#license + +"SQLLine is distributed under the BSD License, meaning that you are free to redistribute, modify, or sell the software with almost no restrictions." + +Statement from Marc Prud'hommeaux regarding inconsistent licenses in some SQLLine source files: + +> SQLLine was once GPL, but it was changed to be BSD a few years back. +> Any references to the GPL are vestigial. Hopefully the license +> declaration at http://sqlline.sourceforge.net/#license is sufficiently +> authoritative in this regard. + + +For the org.apache.hive.beeline.ClassNameCompleter class: + +Copyright (c) 2002-2006, Marc Prud'hommeaux +All rights reserved. + +Redistribution and use in source and binary forms, with or +without modification, are permitted provided that the following +conditions are met: + +Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with +the distribution. + +Neither the name of JLine nor the names of its contributors +may be used to endorse or promote products derived from this +software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, +OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED +AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED +OF THE POSSIBILITY OF SUCH DAMAGE. + +For org.apache.hadoop.hive.llap.daemon.impl.PriorityBlockingDeque class: + +The BSD 3-Clause License + +Copyright (c) 2007, Aviad Ben Dov + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list +of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or other +materials provided with the distribution. +3. 
Neither the name of Infomancers, Ltd. nor the names of its contributors may be +used to endorse or promote products derived from this software without specific +prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +For jquery.sparkline.js: + +License: New BSD License (3-clause) + +Copyright (c) 2012, Splunk Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Splunk Inc nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT +SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT +OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +For json.human.js/json.human.css: + +Copyright (c) 2016 Mariano Guerra + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +For argparse.py: + +argparse is (c) 2006-2009 Steven J. Bethard . + +The argparse module was contributed to Python as of Python 2.7 and thus +was licensed under the Python license. Same license applies to all files in +the argparse package project. + +For details about the Python License, please see doc/Python-License.txt. + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 +Python Software Foundation; All Rights Reserved" are retained in Python alone or +in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + diff --git a/linkis-dist/docker/licenses/LICENSE-apache-zookeeper-3.5.9-bin.txt b/linkis-dist/docker/licenses/LICENSE-apache-zookeeper-3.5.9-bin.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/linkis-dist/docker/licenses/LICENSE-apache-zookeeper-3.5.9-bin.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/linkis-dist/docker/licenses/LICENSE-flink-1.12.2.txt b/linkis-dist/docker/licenses/LICENSE-flink-1.12.2.txt new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/linkis-dist/docker/licenses/LICENSE-flink-1.12.2.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/linkis-dist/docker/licenses/LICENSE-hadoop-2.7.2.txt b/linkis-dist/docker/licenses/LICENSE-hadoop-2.7.2.txt new file mode 100644 index 00000000000..8a4b1756f29 --- /dev/null +++ b/linkis-dist/docker/licenses/LICENSE-hadoop-2.7.2.txt @@ -0,0 +1,289 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +APACHE HADOOP SUBCOMPONENTS: + +The Apache Hadoop project contains subcomponents with separate copyright +notices and license terms. Your use of the source code for the these +subcomponents is subject to the terms and conditions of the following +licenses. + +For the org.apache.hadoop.util.bloom.* classes: + +/** + * + * Copyright (c) 2005, European Commission project OneLab under contract + * 034819 (http://www.one-lab.org) + * All rights reserved. + * Redistribution and use in source and binary forms, with or + * without modification, are permitted provided that the following + * conditions are met: + * - Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * - Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the distribution. + * - Neither the name of the University Catholique de Louvain - UCL + * nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior + * written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN + * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +For portions of the native implementation of slicing-by-8 CRC calculation +in src/main/native/src/org/apache/hadoop/util: + +/** + * Copyright 2008,2009,2010 Massachusetts Institute of Technology. + * All rights reserved. Use of this source code is governed by a + * BSD-style license that can be found in the LICENSE file. 
+ */ + +For src/main/native/src/org/apache/hadoop/io/compress/lz4/{lz4.h,lz4.c,lz4hc.h,lz4hc.c}, + +/* + LZ4 - Fast LZ compression algorithm + Header File + Copyright (C) 2011-2014, Yann Collet. + BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + You can contact the author at : + - LZ4 source repository : http://code.google.com/p/lz4/ + - LZ4 public forum : https://groups.google.com/forum/#!forum/lz4c +*/ diff --git a/linkis-dist/docker/licenses/LICENSE-spark-2.4.3-bin-hadoop2.7.txt b/linkis-dist/docker/licenses/LICENSE-spark-2.4.3-bin-hadoop2.7.txt new file mode 100644 index 00000000000..1346f0622e2 --- /dev/null +++ b/linkis-dist/docker/licenses/LICENSE-spark-2.4.3-bin-hadoop2.7.txt @@ -0,0 +1,517 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +------------------------------------------------------------------------------------ +This project bundles some components that are also licensed under the Apache +License Version 2.0: + +commons-beanutils:commons-beanutils +org.apache.zookeeper:zookeeper +oro:oro +commons-configuration:commons-configuration +commons-digester:commons-digester +com.chuusai:shapeless_2.11 +com.googlecode.javaewah:JavaEWAH +com.twitter:chill-java +com.twitter:chill_2.11 +com.univocity:univocity-parsers +javax.jdo:jdo-api +joda-time:joda-time +net.sf.opencsv:opencsv +org.apache.derby:derby +org.objenesis:objenesis +org.roaringbitmap:RoaringBitmap +org.scalanlp:breeze-macros_2.11 +org.scalanlp:breeze_2.11 +org.typelevel:macro-compat_2.11 +org.yaml:snakeyaml +org.apache.xbean:xbean-asm5-shaded +com.squareup.okhttp3:logging-interceptor +com.squareup.okhttp3:okhttp +com.squareup.okio:okio +org.apache.spark:spark-catalyst_2.11 +org.apache.spark:spark-kvstore_2.11 +org.apache.spark:spark-launcher_2.11 +org.apache.spark:spark-mllib-local_2.11 +org.apache.spark:spark-network-common_2.11 +org.apache.spark:spark-network-shuffle_2.11 +org.apache.spark:spark-sketch_2.11 +org.apache.spark:spark-tags_2.11 +org.apache.spark:spark-unsafe_2.11 +commons-httpclient:commons-httpclient +com.vlkan:flatbuffers +com.ning:compress-lzf +io.airlift:aircompressor +io.dropwizard.metrics:metrics-core +io.dropwizard.metrics:metrics-ganglia +io.dropwizard.metrics:metrics-graphite +io.dropwizard.metrics:metrics-json +io.dropwizard.metrics:metrics-jvm +org.iq80.snappy:snappy +com.clearspring.analytics:stream +com.jamesmurty.utils:java-xmlbuilder +commons-codec:commons-codec +commons-collections:commons-collections +io.fabric8:kubernetes-client +io.fabric8:kubernetes-model +io.netty:netty +io.netty:netty-all +net.hydromatic:eigenbase-properties +net.sf.supercsv:super-csv +org.apache.arrow:arrow-format +org.apache.arrow:arrow-memory +org.apache.arrow:arrow-vector +org.apache.calcite:calcite-avatica +org.apache.calcite:calcite-core +org.apache.calcite:calcite-linq4j +org.apache.commons:commons-crypto +org.apache.commons:commons-lang3 +org.apache.hadoop:hadoop-annotations +org.apache.hadoop:hadoop-auth +org.apache.hadoop:hadoop-client +org.apache.hadoop:hadoop-common +org.apache.hadoop:hadoop-hdfs +org.apache.hadoop:hadoop-mapreduce-client-app +org.apache.hadoop:hadoop-mapreduce-client-common +org.apache.hadoop:hadoop-mapreduce-client-core +org.apache.hadoop:hadoop-mapreduce-client-jobclient +org.apache.hadoop:hadoop-mapreduce-client-shuffle +org.apache.hadoop:hadoop-yarn-api +org.apache.hadoop:hadoop-yarn-client +org.apache.hadoop:hadoop-yarn-common +org.apache.hadoop:hadoop-yarn-server-common +org.apache.hadoop:hadoop-yarn-server-web-proxy +org.apache.httpcomponents:httpclient +org.apache.httpcomponents:httpcore +org.apache.orc:orc-core +org.apache.orc:orc-mapreduce +org.mortbay.jetty:jetty +org.mortbay.jetty:jetty-util +com.jolbox:bonecp +org.json4s:json4s-ast_2.11 +org.json4s:json4s-core_2.11 +org.json4s:json4s-jackson_2.11 +org.json4s:json4s-scalap_2.11 +com.carrotsearch:hppc +com.fasterxml.jackson.core:jackson-annotations +com.fasterxml.jackson.core:jackson-core +com.fasterxml.jackson.core:jackson-databind +com.fasterxml.jackson.dataformat:jackson-dataformat-yaml +com.fasterxml.jackson.module:jackson-module-jaxb-annotations +com.fasterxml.jackson.module:jackson-module-paranamer +com.fasterxml.jackson.module:jackson-module-scala_2.11 +com.github.mifmif:generex +com.google.code.findbugs:jsr305 +com.google.code.gson:gson 
+com.google.inject:guice +com.google.inject.extensions:guice-servlet +com.twitter:parquet-hadoop-bundle +commons-cli:commons-cli +commons-dbcp:commons-dbcp +commons-io:commons-io +commons-lang:commons-lang +commons-logging:commons-logging +commons-net:commons-net +commons-pool:commons-pool +io.fabric8:zjsonpatch +javax.inject:javax.inject +javax.validation:validation-api +log4j:apache-log4j-extras +log4j:log4j +net.sf.jpam:jpam +org.apache.avro:avro +org.apache.avro:avro-ipc +org.apache.avro:avro-mapred +org.apache.commons:commons-compress +org.apache.commons:commons-math3 +org.apache.curator:curator-client +org.apache.curator:curator-framework +org.apache.curator:curator-recipes +org.apache.directory.api:api-asn1-api +org.apache.directory.api:api-util +org.apache.directory.server:apacheds-i18n +org.apache.directory.server:apacheds-kerberos-codec +org.apache.htrace:htrace-core +org.apache.ivy:ivy +org.apache.mesos:mesos +org.apache.parquet:parquet-column +org.apache.parquet:parquet-common +org.apache.parquet:parquet-encoding +org.apache.parquet:parquet-format +org.apache.parquet:parquet-hadoop +org.apache.parquet:parquet-jackson +org.apache.thrift:libfb303 +org.apache.thrift:libthrift +org.codehaus.jackson:jackson-core-asl +org.codehaus.jackson:jackson-mapper-asl +org.datanucleus:datanucleus-api-jdo +org.datanucleus:datanucleus-core +org.datanucleus:datanucleus-rdbms +org.lz4:lz4-java +org.spark-project.hive:hive-beeline +org.spark-project.hive:hive-cli +org.spark-project.hive:hive-exec +org.spark-project.hive:hive-jdbc +org.spark-project.hive:hive-metastore +org.xerial.snappy:snappy-java +stax:stax-api +xerces:xercesImpl +org.codehaus.jackson:jackson-jaxrs +org.codehaus.jackson:jackson-xc +org.eclipse.jetty:jetty-client +org.eclipse.jetty:jetty-continuation +org.eclipse.jetty:jetty-http +org.eclipse.jetty:jetty-io +org.eclipse.jetty:jetty-jndi +org.eclipse.jetty:jetty-plus +org.eclipse.jetty:jetty-proxy +org.eclipse.jetty:jetty-security +org.eclipse.jetty:jetty-server +org.eclipse.jetty:jetty-servlet +org.eclipse.jetty:jetty-servlets +org.eclipse.jetty:jetty-util +org.eclipse.jetty:jetty-webapp +org.eclipse.jetty:jetty-xml + +core/src/main/java/org/apache/spark/util/collection/TimSort.java +core/src/main/resources/org/apache/spark/ui/static/bootstrap* +core/src/main/resources/org/apache/spark/ui/static/jsonFormatter* +core/src/main/resources/org/apache/spark/ui/static/vis* +docs/js/vendor/bootstrap.js + + +------------------------------------------------------------------------------------ +This product bundles various third-party components under other open source licenses. +This section summarizes those components and their licenses. See licenses-binary/ +for text of these licenses. 
+
+
+BSD 2-Clause
+------------
+
+com.github.luben:zstd-jni
+javolution:javolution
+com.esotericsoftware:kryo-shaded
+com.esotericsoftware:minlog
+com.esotericsoftware:reflectasm
+com.google.protobuf:protobuf-java
+org.codehaus.janino:commons-compiler
+org.codehaus.janino:janino
+jline:jline
+org.jodd:jodd-core
+
+
+BSD 3-Clause
+------------
+
+dk.brics.automaton:automaton
+org.antlr:antlr-runtime
+org.antlr:ST4
+org.antlr:stringtemplate
+org.antlr:antlr4-runtime
+antlr:antlr
+com.github.fommil.netlib:core
+com.thoughtworks.paranamer:paranamer
+org.scala-lang:scala-compiler
+org.scala-lang:scala-library
+org.scala-lang:scala-reflect
+org.scala-lang.modules:scala-parser-combinators_2.11
+org.scala-lang.modules:scala-xml_2.11
+org.fusesource.leveldbjni:leveldbjni-all
+net.sourceforge.f2j:arpack_combined_all
+xmlenc:xmlenc
+net.sf.py4j:py4j
+org.jpmml:pmml-model
+org.jpmml:pmml-schema
+
+python/lib/py4j-*-src.zip
+python/pyspark/cloudpickle.py
+python/pyspark/join.py
+core/src/main/resources/org/apache/spark/ui/static/d3.min.js
+
+The CSS style for the navigation sidebar of the documentation was originally
+submitted by Óscar Nájera for the scikit-learn project. The scikit-learn project
+is distributed under the 3-Clause BSD license.
+
+
+MIT License
+-----------
+
+org.spire-math:spire-macros_2.11
+org.spire-math:spire_2.11
+org.typelevel:machinist_2.11
+net.razorvine:pyrolite
+org.slf4j:jcl-over-slf4j
+org.slf4j:jul-to-slf4j
+org.slf4j:slf4j-api
+org.slf4j:slf4j-log4j12
+com.github.scopt:scopt_2.11
+
+core/src/main/resources/org/apache/spark/ui/static/dagre-d3.min.js
+core/src/main/resources/org/apache/spark/ui/static/*dataTables*
+core/src/main/resources/org/apache/spark/ui/static/graphlib-dot.min.js
+core/src/main/resources/org/apache/spark/ui/static/jquery*
+core/src/main/resources/org/apache/spark/ui/static/sorttable.js
+docs/js/vendor/anchor.min.js
+docs/js/vendor/jquery*
+docs/js/vendor/modernizr*
+
+
+Common Development and Distribution License (CDDL) 1.0
+------------------------------------------------------
+
+javax.activation:activation http://www.oracle.com/technetwork/java/javase/tech/index-jsp-138795.html
+javax.xml.stream:stax-api https://jcp.org/en/jsr/detail?id=173
+
+
+Common Development and Distribution License (CDDL) 1.1
+------------------------------------------------------
+
+javax.annotation:javax.annotation-api https://jcp.org/en/jsr/detail?id=250
+javax.servlet:javax.servlet-api https://javaee.github.io/servlet-spec/
+javax.transaction:jta http://www.oracle.com/technetwork/java/index.html
+javax.ws.rs:javax.ws.rs-api https://github.com/jax-rs
+javax.xml.bind:jaxb-api https://github.com/javaee/jaxb-v2
+org.glassfish.hk2:hk2-api https://github.com/javaee/glassfish
+org.glassfish.hk2:hk2-locator (same)
+org.glassfish.hk2:hk2-utils
+org.glassfish.hk2:osgi-resource-locator
+org.glassfish.hk2.external:aopalliance-repackaged
+org.glassfish.hk2.external:javax.inject
+org.glassfish.jersey.bundles.repackaged:jersey-guava
+org.glassfish.jersey.containers:jersey-container-servlet
+org.glassfish.jersey.containers:jersey-container-servlet-core
+org.glassfish.jersey.core:jersey-client
+org.glassfish.jersey.core:jersey-common
+org.glassfish.jersey.core:jersey-server
+org.glassfish.jersey.media:jersey-media-jaxb
+
+
+Mozilla Public License (MPL) 1.1
+--------------------------------
+
+com.github.rwl:jtransforms https://sourceforge.net/projects/jtransforms/
+
+
+Python Software Foundation License
+----------------------------------
+
+pyspark/heapq3.py
+
+
+Public Domain
+------------- + +aopalliance:aopalliance +net.iharder:base64 +org.tukaani:xz + + +Creative Commons CC0 1.0 Universal Public Domain Dedication +----------------------------------------------------------- +(see LICENSE-CC0.txt) + +data/mllib/images/kittens/29.5.a_b_EGDP022204.jpg +data/mllib/images/kittens/54893.jpg +data/mllib/images/kittens/DP153539.jpg +data/mllib/images/kittens/DP802813.jpg +data/mllib/images/multi-channel/chr30.4.184.jpg diff --git a/linkis-dist/docker/linkis-with-mysql-jdbc.Dockerfile b/linkis-dist/docker/linkis-with-mysql-jdbc.Dockerfile new file mode 100644 index 00000000000..e8fed01d7d8 --- /dev/null +++ b/linkis-dist/docker/linkis-with-mysql-jdbc.Dockerfile @@ -0,0 +1,29 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +ARG LINKIS_IMAGE=linkis:dev + +###################################################################### +# linkis image with mysql jdbc +###################################################################### +FROM ${LINKIS_IMAGE} + +ARG LINKIS_HOME=/opt/linkis +ARG MYSQL_JDBC_VERSION=8.0.28 + +COPY mysql-connector-java-${MYSQL_JDBC_VERSION}.jar ${LINKIS_HOME}/lib/linkis-commons/public-module/ +COPY mysql-connector-java-${MYSQL_JDBC_VERSION}.jar ${LINKIS_HOME}/lib/linkis-spring-cloud-services/linkis-mg-gateway/ diff --git a/linkis-dist/docker/linkis.Dockerfile b/linkis-dist/docker/linkis.Dockerfile index ba765720675..4a9fa98862a 100644 --- a/linkis-dist/docker/linkis.Dockerfile +++ b/linkis-dist/docker/linkis.Dockerfile @@ -15,21 +15,39 @@ # limitations under the License. 
#
-######################################################################
-# linkis image
-######################################################################
-
 ARG IMAGE_BASE=centos:7
 ARG IMAGE_BASE_WEB=nginx:1.19.6
 
-FROM ${IMAGE_BASE} as linkis
-
-ARG BUILD_TYPE=dev
+######################################################################
+# linkis image base
+######################################################################
+FROM ${IMAGE_BASE} as linkis-base
 
 ARG JDK_VERSION=1.8.0-openjdk
 ARG JDK_BUILD_REVISION=1.8.0.332.b09-1.el7_9
 
-ARG MYSQL_JDBC_VERSION=5.1.49
+# If you want to use a specific yum repos conf file, put it at linkis-dist/docker/CentOS-Base.repo
+# and add [COPY apache-linkis-*-incubating-bin/docker/CentOS-Epel.repo /etc/yum.repos.d/CentOS-Epel.repo]
+
+# TODO: remove the mysql client installation once schema-init-tools is ready
+RUN yum install -y \
+    less vim unzip curl sudo krb5-workstation sssd crontabs net-tools python-pip glibc-common \
+    java-${JDK_VERSION}-${JDK_BUILD_REVISION} \
+    java-${JDK_VERSION}-devel-${JDK_BUILD_REVISION} \
+    mysql \
+    && yum clean all
+
+RUN cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
+    && localedef -c -f UTF-8 -i en_US en_US.UTF-8
+ENV LANG=en_US.UTF-8 LANGUAGE=en_US:zh LC_TIME=en_US.UTF-8
+ENV TZ="Asia/Shanghai"
+
+
+
+######################################################################
+# linkis image for release
+######################################################################
+FROM linkis-base as linkis
 
 ARG LINKIS_VERSION=0.0.0
 ARG LINKIS_SYSTEM_USER="hadoop"
@@ -38,45 +56,38 @@ ARG LINKIS_SYSTEM_UID="9001"
 ARG LINKIS_HOME=/opt/linkis
 ARG LINKIS_CONF_DIR=/etc/linkis-conf
 ARG LINKIS_LOG_DIR=/var/logs/linkis
+ARG LDH_HOME=/opt/ldh
 
 WORKDIR ${LINKIS_HOME}
 
 RUN useradd -r -s /bin/bash -u ${LINKIS_SYSTEM_UID} -g root -G wheel ${LINKIS_SYSTEM_USER}
-
-# TODO: remove install mysql client when schema-init-tools is ready
-RUN yum install -y \
-    vim unzip curl sudo krb5-workstation sssd crontabs python-pip \
-    java-${JDK_VERSION}-${JDK_BUILD_REVISION} \
-    java-${JDK_VERSION}-devel-${JDK_BUILD_REVISION} \
-    mysql \
-    && yum clean all
-
 RUN sed -i "s#^%wheel.*#%wheel ALL=(ALL) NOPASSWD: ALL#g" /etc/sudoers
 
 RUN mkdir -p /opt/tmp \
    && mkdir -p ${LINKIS_CONF_DIR} \
-   && mkdir -p ${LINKIS_LOG_DIR}
+   && mkdir -p ${LINKIS_LOG_DIR} \
+   && mkdir -p ${LDH_HOME}
 
 ENV JAVA_HOME /etc/alternatives/jre
 ENV LINKIS_CONF_DIR ${LINKIS_CONF_DIR}
 ENV LINKIS_CLIENT_CONF_DIR ${LINKIS_CONF_DIR}
 ENV LINKIS_HOME ${LINKIS_HOME}
 
-ADD apache-linkis-${LINKIS_VERSION}-incubating-bin.tar.gz /opt/tmp/
+# pre-operations on the unpacked distribution can be done here
+ADD apache-linkis-${LINKIS_VERSION}-incubating-bin /opt/tmp/
 RUN mv /opt/tmp/linkis-package/* ${LINKIS_HOME}/ \
+    && mv /opt/tmp/LICENSE ${LINKIS_HOME}/ \
+    && mv /opt/tmp/NOTICE ${LINKIS_HOME}/ \
+    && mv /opt/tmp/DISCLAIMER ${LINKIS_HOME}/ \
+    && mv /opt/tmp/README.md ${LINKIS_HOME}/ \
+    && mv /opt/tmp/README_CN.md ${LINKIS_HOME}/ \
    && rm -rf /opt/tmp
 
-# Put mysql-connector-java-*.jar package into the image only in development mode
-RUN if [ "$BUILD_TYPE" = "dev" ] ; then \
-    curl -L -o ${LINKIS_HOME}/lib/linkis-commons/public-module/mysql-connector-java-${MYSQL_JDBC_VERSION}.jar \
-    https://repo1.maven.org/maven2/mysql/mysql-connector-java/${MYSQL_JDBC_VERSION}/mysql-connector-java-${MYSQL_JDBC_VERSION}.jar \
-    && cp ${LINKIS_HOME}/lib/linkis-commons/public-module/mysql-connector-java-${MYSQL_JDBC_VERSION}.jar ${LINKIS_HOME}/lib/linkis-spring-cloud-services/linkis-mg-gateway/ ;\
-    fi
-
 RUN chmod g+w -R ${LINKIS_HOME} && chown ${LINKIS_SYSTEM_USER}:${LINKIS_SYSTEM_GROUP} -R ${LINKIS_HOME} \
    && chmod g+w -R ${LINKIS_CONF_DIR} && chown ${LINKIS_SYSTEM_USER}:${LINKIS_SYSTEM_GROUP} -R ${LINKIS_CONF_DIR} \
    && chmod g+w -R ${LINKIS_LOG_DIR} && chown ${LINKIS_SYSTEM_USER}:${LINKIS_SYSTEM_GROUP} -R ${LINKIS_LOG_DIR} \
+   && chmod g+w -R ${LDH_HOME} && chown ${LINKIS_SYSTEM_USER}:${LINKIS_SYSTEM_GROUP} -R ${LDH_HOME} \
    && chmod a+x ${LINKIS_HOME}/bin/* \
    && chmod a+x ${LINKIS_HOME}/sbin/*
@@ -84,7 +95,6 @@ USER ${LINKIS_SYSTEM_USER}
 
 ENTRYPOINT ["/bin/bash"]
 
-
 ######################################################################
 # linkis web image
 ######################################################################
diff --git a/linkis-dist/docker/scripts/entry-point-ldh.sh b/linkis-dist/docker/scripts/entry-point-ldh.sh
index 252f344982d..6afa228902f 100644
--- a/linkis-dist/docker/scripts/entry-point-ldh.sh
+++ b/linkis-dist/docker/scripts/entry-point-ldh.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with
@@ -34,6 +34,18 @@ hdfs dfs -mkdir -p /spark2-history
 hdfs dfs -chmod -R 777 /spark2-history
 hdfs dfs -mkdir -p /completed-jobs
 hdfs dfs -chmod -R 777 /completed-jobs
+hdfs dfs -chmod 777 /
+
+# copy mysql-connector-java-*.jar from the shared directory
+mysql_connector_jar_path=/opt/ldh/current/hive/lib/mysql-connector-java-*.jar
+jar_files=$(ls $mysql_connector_jar_path 2> /dev/null | wc -l)
+
+if [ "$jar_files" == "0" ] ;then  # if the jar is not present yet
+  echo "try to copy mysql-connector-java-*.jar to /opt/ldh/current/hive/lib/ and /opt/ldh/current/spark/jars/"
+  cp /opt/common/extendlib/mysql-connector-java-*.jar /opt/ldh/current/hive/lib/
+  cp /opt/common/extendlib/mysql-connector-java-*.jar /opt/ldh/current/spark/jars/
+fi
+
 # - hive
 /opt/ldh/current/hive/bin/schematool -initSchema -dbType mysql
@@ -44,7 +56,7 @@ hdfs dfs -chmod -R 777 /completed-jobs
 /opt/ldh/current/spark/sbin/start-history-server.sh
 
 # flink
-/opt/ldh/current/flink/bin/yarn-session.sh --detached
+HADOOP_CLASSPATH=`hadoop classpath` /opt/ldh/current/flink/bin/yarn-session.sh --detached
 
 # zookeeper
 /opt/ldh/current/zookeeper/bin/zkServer.sh start
diff --git a/linkis-dist/docker/scripts/make-linikis-image-with-mysql-jdbc.sh b/linkis-dist/docker/scripts/make-linikis-image-with-mysql-jdbc.sh
new file mode 100755
index 00000000000..8733294733b
--- /dev/null
+++ b/linkis-dist/docker/scripts/make-linikis-image-with-mysql-jdbc.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+
+WORK_DIR=`cd $(dirname $0); pwd -P`
+
+. ${WORK_DIR}/utils.sh
+
+IMAGE_NAME=${IMAGE_NAME:-linkis:with-jdbc}
+LINKIS_IMAGE=${LINKIS_IMAGE:-linkis:dev}
+LINKIS_HOME=${LINKIS_HOME:-/opt/linkis}
+MYSQL_JDBC_VERSION=${MYSQL_JDBC_VERSION:-8.0.28}
+MYSQL_JDBC_FILENAME=mysql-connector-java-${MYSQL_JDBC_VERSION}.jar
+MYSQL_JDBC_URL="https://repo1.maven.org/maven2/mysql/mysql-connector-java/${MYSQL_JDBC_VERSION}/${MYSQL_JDBC_FILENAME}"
+
+BUILD_DIR=`mktemp -d -t linkis-build-XXXXX`
+
+echo "# build dir: ${BUILD_DIR}"
+echo "# base image: ${LINKIS_IMAGE}"
+echo "# mysql jdbc version: ${MYSQL_JDBC_VERSION}"
+
+download ${MYSQL_JDBC_URL} ${MYSQL_JDBC_FILENAME} ${BUILD_DIR}
+
+echo "try to exec: docker build -f ${WORK_DIR}/../linkis-with-mysql-jdbc.Dockerfile \
+  -t ${IMAGE_NAME} \
+  --build-arg LINKIS_IMAGE=${LINKIS_IMAGE} \
+  --build-arg LINKIS_HOME=${LINKIS_HOME} \
+  --build-arg MYSQL_JDBC_VERSION=${MYSQL_JDBC_VERSION} \
+  ${BUILD_DIR}"
+
+docker build -f ${WORK_DIR}/../linkis-with-mysql-jdbc.Dockerfile \
+  -t ${IMAGE_NAME} \
+  --build-arg LINKIS_IMAGE=${LINKIS_IMAGE} \
+  --build-arg LINKIS_HOME=${LINKIS_HOME} \
+  --build-arg MYSQL_JDBC_VERSION=${MYSQL_JDBC_VERSION} \
+  ${BUILD_DIR}
+
+echo "# done, image: ${IMAGE_NAME}"
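A quick usage sketch (illustrative, not part of the patch; the variable names and defaults are the ones the script itself defines above):

```shell
# Build a linkis:with-jdbc image on top of a locally built linkis:dev image,
# bundling a specific MySQL JDBC driver version; run from the repo root.
IMAGE_NAME=linkis:with-jdbc \
LINKIS_IMAGE=linkis:dev \
MYSQL_JDBC_VERSION=8.0.28 \
  bash linkis-dist/docker/scripts/make-linikis-image-with-mysql-jdbc.sh
```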
diff --git a/linkis-dist/docker/scripts/prepare-ldh-image.sh b/linkis-dist/docker/scripts/prepare-ldh-image.sh
index dce3f8fdefc..791c7c731b8 100755
--- a/linkis-dist/docker/scripts/prepare-ldh-image.sh
+++ b/linkis-dist/docker/scripts/prepare-ldh-image.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with
@@ -15,28 +15,10 @@
 #
 #
 
-download() {
-  TAR_URL=$1
-  TAR_FILE=$2
-  HARD_LINK_ROOT=$3
-
-  if [ ! -f ${TAR_CACHE_ROOT}/${TAR_FILE} ]; then
-    echo "- downloading ${TAR_FILE} to ${TAR_CACHE_ROOT} from ${TAR_URL}"
-    curl -L ${TAR_URL} -o ${TAR_CACHE_ROOT}/${TAR_FILE}
-  else
-    echo "- ${TAR_FILE} already exists in ${TAR_CACHE_ROOT}, downloading skipped."
-  fi
-
-  echo "- create hard link: ${HARD_LINK_ROOT}/${TAR_FILE} -> ${TAR_CACHE_ROOT}/${TAR_FILE}"
-  rm -rf ${HARD_LINK_ROOT}/${TAR_FILE}
-  ln ${TAR_CACHE_ROOT}/${TAR_FILE} ${HARD_LINK_ROOT}/${TAR_FILE}
-}
-
 WORK_DIR=`cd $(dirname $0); pwd -P`
-PROJECT_ROOT=${WORK_DIR}/../..
-PROJECT_TARGET=${PROJECT_ROOT}/target
-TAR_CACHE_ROOT=${HOME}/.linkis-build-cache
+. ${WORK_DIR}/utils.sh
+
 LDH_TAR_DIR=${PROJECT_TARGET}/ldh-tars
 
 mkdir -p ${TAR_CACHE_ROOT}
@@ -45,13 +27,13 @@ rm -rf ${LDH_TAR_DIR} && mkdir -p ${LDH_TAR_DIR}
 rm -rf ${PROJECT_TARGET}/entry-point-ldh.sh
 cp ${WORK_DIR}/entry-point-ldh.sh ${PROJECT_TARGET}/
 
-MYSQL_VERSION=${MYSQL_VERSION:-5.7}
 HADOOP_VERSION=${HADOOP_VERSION:-2.7.2}
 HIVE_VERSION=${HIVE_VERSION:-2.3.3}
 SPARK_VERSION=${SPARK_VERSION:-2.4.3}
 SPARK_HADOOP_VERSION=${SPARK_HADOOP_VERSION:-2.7}
 FLINK_VERSION=${FLINK_VERSION:-1.12.2}
 ZOOKEEPER_VERSION=${ZOOKEEPER_VERSION:-3.5.9}
+MYSQL_JDBC_VERSION=${MYSQL_JDBC_VERSION:-8.0.28}
 
 set -e
@@ -62,6 +44,7 @@ TARFILENAME_HIVE="apache-hive-${HIVE_VERSION}-bin.tar.gz"
 TARFILENAME_SPARK="spark-${SPARK_VERSION}-bin-hadoop${SPARK_HADOOP_VERSION}.tgz"
 TARFILENAME_FLINK="flink-${FLINK_VERSION}-bin-scala_2.11.tgz"
 TARFILENAME_ZOOKEEPER="apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz"
+TARFILENAME_MYSQL_JDBC=mysql-connector-java-${MYSQL_JDBC_VERSION}.jar
 
 DOWNLOAD_URL_HADOOP="https://archive.apache.org/dist/hadoop/common/hadoop-${HADOOP_VERSION}/${TARFILENAME_HADOOP}"
 DOWNLOAD_URL_HIVE="https://archive.apache.org/dist/hive/hive-${HIVE_VERSION}/${TARFILENAME_HIVE}"
@@ -69,8 +52,16 @@ DOWNLOAD_URL_SPARK="https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}
 DOWNLOAD_URL_FLINK="https://archive.apache.org/dist/flink/flink-${FLINK_VERSION}/${TARFILENAME_FLINK}"
 DOWNLOAD_URL_ZOOKEEPER="https://archive.apache.org/dist/zookeeper/zookeeper-${ZOOKEEPER_VERSION}/${TARFILENAME_ZOOKEEPER}"
+
+
+DOWNLOAD_URL_MYSQL_JDBC="https://repo1.maven.org/maven2/mysql/mysql-connector-java/${MYSQL_JDBC_VERSION}/${TARFILENAME_MYSQL_JDBC}"
+
+
 download ${DOWNLOAD_URL_HADOOP} ${TARFILENAME_HADOOP} ${LDH_TAR_DIR}
 download ${DOWNLOAD_URL_HIVE} ${TARFILENAME_HIVE} ${LDH_TAR_DIR}
 download ${DOWNLOAD_URL_SPARK} ${TARFILENAME_SPARK} ${LDH_TAR_DIR}
 download ${DOWNLOAD_URL_FLINK} ${TARFILENAME_FLINK} ${LDH_TAR_DIR}
 download ${DOWNLOAD_URL_ZOOKEEPER} ${TARFILENAME_ZOOKEEPER} ${LDH_TAR_DIR}
+
+#download ${DOWNLOAD_URL_MYSQL_JDBC} ${TARFILENAME_MYSQL_JDBC} ${LDH_TAR_DIR}
\ No newline at end of file
diff --git a/linkis-dist/docker/scripts/utils.sh b/linkis-dist/docker/scripts/utils.sh
new file mode 100755
index 00000000000..f7813cfe701
--- /dev/null
+++ b/linkis-dist/docker/scripts/utils.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+
+download() {
+  TAR_URL=$1
+  TAR_FILE=$2
+  HARD_LINK_ROOT=$3
+
+  if [ ! -f ${TAR_CACHE_ROOT}/${TAR_FILE} ]; then
+    echo "- downloading ${TAR_FILE} to ${TAR_CACHE_ROOT} from ${TAR_URL}"
+    curl -L ${TAR_URL} -o ${TAR_CACHE_ROOT}/${TAR_FILE}
+  else
+    echo "- ${TAR_FILE} already exists in ${TAR_CACHE_ROOT}, downloading skipped."
+  fi
+
+  echo "- cp: ${TAR_CACHE_ROOT}/${TAR_FILE} -> ${HARD_LINK_ROOT}/${TAR_FILE}"
+  rm -rf ${HARD_LINK_ROOT}/${TAR_FILE}
+  # ln may fail with an invalid cross-device link error, so copy instead
+  cp ${TAR_CACHE_ROOT}/${TAR_FILE} ${HARD_LINK_ROOT}/${TAR_FILE}
+}
+
+WORK_DIR=`cd $(dirname $0); pwd -P`
+
+PROJECT_ROOT=${WORK_DIR}/../..
+PROJECT_TARGET=${PROJECT_ROOT}/target
+TAR_CACHE_ROOT=${HOME}/.linkis-build-cache
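For reference, a minimal usage sketch of these helpers (an assumed invocation mirroring how prepare-ldh-image.sh calls them; the ZooKeeper URL and file name are the defaults shown above):

```shell
# Run from linkis-dist/docker/scripts so the relative paths in utils.sh resolve.
# download() keeps the file cached in ${HOME}/.linkis-build-cache and only
# copies it into the target directory, so repeated builds skip the download.
. ./utils.sh
mkdir -p "${TAR_CACHE_ROOT}" "${PROJECT_TARGET}/ldh-tars"
download "https://archive.apache.org/dist/zookeeper/zookeeper-3.5.9/apache-zookeeper-3.5.9-bin.tar.gz" \
         "apache-zookeeper-3.5.9-bin.tar.gz" \
         "${PROJECT_TARGET}/ldh-tars"
```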
diff --git a/linkis-dist/helm/README.md b/linkis-dist/helm/README.md
index 22818decd75..2943fa1f6ce 100644
--- a/linkis-dist/helm/README.md
+++ b/linkis-dist/helm/README.md
@@ -4,34 +4,35 @@ Helm charts for Linkis
 [![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)
 
 # Pre-requisites
+
+> Note: KinD is required only for development and testing.
+
 * [Kubernetes](https://kubernetes.io/docs/setup/), minimum version v1.21.0+
 * [Helm](https://helm.sh/docs/intro/install/), minimum version v3.0.0+.
 * [KinD](https://kind.sigs.k8s.io/docs/user/quick-start/), minimum version v0.11.0+.
 
-
 # Installation
 
 ```shell
-# Deploy Apache Linkis on kubernetes, kubernetes 
+# Deploy Apache Linkis on kubernetes, kubernetes
 # namespace is 'linkis', helm release is 'linkis-demo'
 
 # Option 1, use build-in script
 $> ./scripts/install-charts.sh linkis linkis-demo
 
 # Option 2, use `helm` command line
-$> helm install --create-namespace -f ./charts/linkis/values.yaml --namespace linkis linkis-demo ./charts/linkis 
+$> helm install --create-namespace -f ./charts/linkis/values.yaml --namespace linkis linkis-demo ./charts/linkis
 ```
 
 # Uninstallation
 
 ```shell
-$> helm delete --namespace linkis linkis-demo 
+$> helm delete --namespace linkis linkis-demo
 ```
 
 # For developers
 
-We recommend using [KinD](https://kind.sigs.k8s.io/docs/user/quick-start/) for development and testing. 
+We recommend using [KinD](https://kind.sigs.k8s.io/docs/user/quick-start/) for development and testing.
 KinD is a tool for running local Kubernetes clusters using Docker containers as “Kubernetes nodes”.
 
 Follow the link below to install the KinD in your development environment.
@@ -39,23 +40,24 @@ Follow the link below to install the KinD in your development environment.
 - [KinD Installation](https://kind.sigs.k8s.io/docs/user/quick-start/#installation)
 
 ## Setup a local cluster for test
+
 Once you have installed KinD, you can run the following command to set up a local Kubernetes cluster and deploy an Apache Linkis cluster on it.
 
 ```shell
 # It will deploy a MySQL instance in the KinD cluster,
-# then deploy an Apache Linkis cluster, which will use 
-# the MySQL instances above 
+# then deploy an Apache Linkis cluster, which will use
+# the MySQL instance above
 $> sh ./scripts/create-kind-cluster.sh \
    && sh ./scripts/install-mysql.sh \
    && sh ./scripts/install-charts.sh
-
+
 Creating cluster "test-helm" ...
- ✓ Ensuring node image (kindest/node:v1.21.1) 🖼 
+ ✓ Ensuring node image (kindest/node:v1.21.1) 🖼
  ✓ Preparing nodes 📦
- ✓ Writing configuration 📜 
- ✓ Starting control-plane 🕹️ 
- ✓ Installing CNI 🔌 
- ✓ Installing StorageClass 💾 
+ ✓ Writing configuration 📜
+ ✓ Starting control-plane 🕹️
+ ✓ Installing CNI 🔌
+ ✓ Installing StorageClass 💾
 Set kubectl context to "kind-test-helm"
 You can now use your cluster with:
@@ -94,7 +96,9 @@ Enjoy!
 ```
 
 ## Enable port-forward for jvm remote debug
+
 > INFO: [Understand how port-forward works.](https://kubernetes.io/docs/tasks/access-application-cluster/port-forward-access-application-cluster/)
+
 ```shell
 # start port-forward for all servers
 $> ./scripts/remote-debug-proxy.sh start
@@ -110,12 +114,12 @@ $> ./scripts/remote-debug-proxy.sh start
 - starting port-forwad for [cg-engineconnmanager] with mapping [local->5009:5005->pod] ...
 - starting port-forwad for [cg-engineplugin] with mapping [local->5010:5005->pod] ...
 
-# Once the port-forward setup, you can configure the jvm remote debugger of you IDE 
+# Once the port-forward is set up, you can configure the JVM remote debugger of your IDE
 # to connect to the local port, which maps to a backend server port, and start
 # the remote debug process.
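# For example (an illustrative sketch, not part of this chart), you could
# attach the JDK's command-line debugger to the forwarded mg-gateway port
# via the socket connector:
# $> jdb -connect com.sun.jdi.SocketAttach:hostname=localhost,port=5002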
 
 # list existing port-forward instances
-$> sh ./scripts/remote-debug-proxy.sh list 
+$> sh ./scripts/remote-debug-proxy.sh list
 hadoop 65439 0.0 0.1 5054328 30344 s013 S 8:01PM 0:00.13 kubectl port-forward -n linkis pod/linkis-demo-cg-engineplugin-548b8cf695-g4hnp 5010:5005 --address=0.0.0.0
 hadoop 65437 0.0 0.1 5054596 30816 s013 S 8:01PM 0:00.13 kubectl port-forward -n linkis pod/linkis-demo-cg-engineconnmanager-868d8d4d6f-dqt7d 5009:5005 --address=0.0.0.0
 hadoop 65435 0.0 0.1 5051256 31128 s013 S 8:01PM 0:00.14 kubectl port-forward -n linkis pod/linkis-demo-cg-entrance-7dc7b477d4-87fdt 5008:5005 --address=0.0.0.0
@@ -145,10 +149,12 @@ $> sh ./scripts/remote-debug-proxy.sh stop
 ```
 
 ## Enter into a backend server container
+
 ```shell
 # Enter into the mg-gateway and submit a job with linkis-cli
 $> sh ./scripts/login-pod.sh mg-gateway
-```
+```
+
 ```shell
 # in the mg-gateway container
 bash-4.2$ ./bin/linkis-cli -engineType shell-1 -codeType shell -code "echo \"hello\" " -submitUser hadoop -proxyUser hadoop
@@ -167,7 +173,7 @@ ExecId:exec_id018016linkis-cg-entrance10.244.0.13:9104LINKISCLI_hadoop_shell_0
 2022-07-31 16:24:24.024 INFO Program is substituting variables for you
 2022-07-31 16:24:24.024 INFO Variables substitution ended successfully
 2022-07-31 16:24:24.024 WARN The code you submit will not be limited by the limit
-Job with jobId : 1 and execID : LINKISCLI_hadoop_shell_0 submitted 
+Job with jobId : 1 and execID : LINKISCLI_hadoop_shell_0 submitted
 2022-07-31 16:24:25.024 INFO You have submitted a new job, script code (after variable substitution) is
 ************************************SCRIPT CODE************************************
 echo "hello"
@@ -181,17 +187,20 @@ Your job is being scheduled by orchestrator.
 ```
 
 ## Destroy the local cluster
+
 ```shell
 # Option 1: delete the helm release only
-$> helm delete --namespace linkis linkis-demo 
+$> helm delete --namespace linkis linkis-demo
 
 # Option 2: destroy the KinD cluster, no need to delete
 # the helm release first
 $> kind delete cluster --name test-helm
 ```
 
-## Test with LDH 
+## Test with LDH
+
 We introduced a new image, called LDH (Linkis's hadoop all-in-one image), which provides a pseudo-distributed hadoop cluster for quick testing.
 This image contains the following Hadoop components; the default mode for engines in LDH is on-yarn.
+
 * Hadoop 2.7.2 , including HDFS and YARN
 * Hive 2.3.3
 * Spark 2.4.3
@@ -221,7 +230,7 @@ $> sh ./scripts/create-kind-cluster.sh \
    && sh ./scripts/install-mysql.sh \
    && sh ./scripts/install-ldh.sh \
    && sh ./scripts/install-charts.sh
-
+
 ...
 
 # give it a try
diff --git a/linkis-dist/helm/README_CN.md b/linkis-dist/helm/README_CN.md
new file mode 100644
index 00000000000..83c84376728
--- /dev/null
+++ b/linkis-dist/helm/README_CN.md
@@ -0,0 +1,332 @@
+Linkis Helm Charts 组件
+==========
+
+[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)
+
+# 前置条件
+> 注意: 仅在开发和测试阶段才需要 KinD.
+* [Kubernetes](https://kubernetes.io/docs/setup/), 最低版本 v1.21.0+
+* [Helm](https://helm.sh/docs/intro/install/), 最低版本 v3.0.0+.
+* [KinD](https://kind.sigs.k8s.io/docs/user/quick-start/), 最低版本 v0.11.0+.
+
+
+# 安装流程
+
+```shell
+# 在 kubernetes 上安装 Apache Linkis, Linkis 会被部署在名为'linkis'的名字空间中,对应的 Helm Release 名为 'linkis-demo'
+
+# 选项 1, 使用 Linkis 项目提供的脚本来部署
+$> ./scripts/install-charts.sh linkis linkis-demo
+
+# 选项 2, 使用 Helm 命令来部署
+$> helm install --create-namespace -f ./charts/linkis/values.yaml --namespace linkis linkis-demo ./charts/linkis
+```
+
+# 卸载流程
+
+```shell
+$> helm delete --namespace linkis linkis-demo
+```
+
+# 开发者工具
+
+建议使用 [KinD](https://kind.sigs.k8s.io/docs/user/quick-start/) 来进行 Helm Charts 的开发和测试。KinD 是一个使用 Docker 容器作为
+"Kubernetes 节点" 来运行本地 Kubernetes 集群的工具。
+
+本地部署 KinD 工具的详细流程请参考如下文档:
+
+- [KinD Installation](https://kind.sigs.k8s.io/docs/user/quick-start/#installation)
+
+## 部署 Linkis 组件进行测试
+当你已经在开发环境中完成了 KinD 的安装后,可以通过运行以下命令在开发机上拉起一个 kubernetes 集群,并在上面部署 Apache Linkis 组件。
+
+
+```shell
+# 以下命令会在 KinD 集群上部署一个 MySQL 实例,同时还会部署一个 Apache Linkis 实例,
+# 这个 Apache Linkis 实例会使用这个 MySQL 实例作为后台数据库.
+$> sh ./scripts/create-kind-cluster.sh \
+   && sh ./scripts/install-mysql.sh \
+   && sh ./scripts/install-charts.sh
+
+Creating cluster "test-helm" ...
+ ✓ Ensuring node image (kindest/node:v1.21.1) 🖼
+ ✓ Preparing nodes 📦
+ ✓ Writing configuration 📜
+ ✓ Starting control-plane 🕹️
+ ✓ Installing CNI 🔌
+ ✓ Installing StorageClass 💾
+Set kubectl context to "kind-test-helm"
+You can now use your cluster with:
+
+kubectl cluster-info --context kind-test-helm
+
+Have a nice day! 👋
+Image: "linkis:1.3.0" with ID "sha256:917138e97807c3a2d7d7fe4607c1196e7c00406bb3b8f81a3b64e54a4d8fe074" not yet present on node "test-helm-control-plane", loading...
+Image: "mysql:5.7" with ID "sha256:efa50097efbdef5884e5ebaba4da5899e79609b78cd4fe91b365d5d9d3205188" not yet present on node "test-helm-control-plane", loading...
+namespace/mysql created
+service/mysql created
+deployment.apps/mysql created
+
+NAME: linkis-demo
+LAST DEPLOYED: Wed Jul 6 23:46:30 2022
+NAMESPACE: linkis
+STATUS: deployed
+REVISION: 1
+TEST SUITE: None
+NOTES:
+---
+Welcome to Apache Linkis (v1.3.0)!
+
+.___ .___ .______ .____/\ .___ .________
+| | : __|: \ : / \: __|| ___/
+| | | : || ||. ___/| : ||___ \
+| |/\ | || | || \ | || /
+| / \| ||___| || \| ||__:___/
+|______/|___| |___||___\ /|___| : v1.3.0
+ \/
+
+Linkis builds a layer of computation middleware between upper applications and underlying engines.
+Please visit https://linkis.apache.org/ for details.
+
+Enjoy!
+
+```
+
+## 开启 port-forward,支持 JVM 远程调试
+> INFO: [Understand how port-forward works.](https://kubernetes.io/docs/tasks/access-application-cluster/port-forward-access-application-cluster/)
+```shell
+# 为每个 Apache Linkis 服务创建一个 port-forward 实例
+$> ./scripts/remote-debug-proxy.sh start
+- starting port-forwad for [web] with mapping [local->8087:8087->pod] ...
+- starting port-forwad for [mg-eureka] with mapping [local->5001:5005->pod] ...
+- starting port-forwad for [mg-gateway] with mapping [local->5002:5005->pod] ...
+- starting port-forwad for [ps-cs] with mapping [local->5003:5005->pod] ... +- starting port-forwad for [ps-publicservice] with mapping [local->5004:5005->pod] ... +- starting port-forwad for [ps-metadataquery] with mapping [local->5005:5005->pod] ... +- starting port-forwad for [ps-data-source-manager] with mapping [local->5006:5005->pod] ... +- starting port-forwad for [cg-linkismanager] with mapping [local->5007:5005->pod] ... +- starting port-forwad for [cg-entrance] with mapping [local->5008:5005->pod] ... +- starting port-forwad for [cg-engineconnmanager] with mapping [local->5009:5005->pod] ... +- starting port-forwad for [cg-engineplugin] with mapping [local->5010:5005->pod] ... + +# 一旦 port-forward 创建完成,你就可以通过设置 IDE 的配置, +# 将 JVM 远程调试器连接到本地端口,来启动远程调试。 + +# 获取 port-forward 实例列表 +$> sh ./scripts/remote-debug-proxy.sh list +hadoop 65439 0.0 0.1 5054328 30344 s013 S 8:01PM 0:00.13 kubectl port-forward -n linkis pod/linkis-demo-cg-engineplugin-548b8cf695-g4hnp 5010:5005 --address=0.0.0.0 +hadoop 65437 0.0 0.1 5054596 30816 s013 S 8:01PM 0:00.13 kubectl port-forward -n linkis pod/linkis-demo-cg-engineconnmanager-868d8d4d6f-dqt7d 5009:5005 --address=0.0.0.0 +hadoop 65435 0.0 0.1 5051256 31128 s013 S 8:01PM 0:00.14 kubectl port-forward -n linkis pod/linkis-demo-cg-entrance-7dc7b477d4-87fdt 5008:5005 --address=0.0.0.0 +hadoop 65433 0.0 0.1 5049708 30092 s013 S 8:01PM 0:00.15 kubectl port-forward -n linkis pod/linkis-demo-cg-linkismanager-6f76bb5c65-vc292 5007:5005 --address=0.0.0.0 +hadoop 65431 0.0 0.1 5060716 30012 s013 S 8:01PM 0:00.13 kubectl port-forward -n linkis pod/linkis-demo-ps-data-source-manager-658474588-hjvdw 5006:5005 --address=0.0.0.0 +hadoop 65429 0.0 0.1 5059972 31048 s013 S 8:01PM 0:00.14 kubectl port-forward -n linkis pod/linkis-demo-ps-metadataquery-695877dcf7-r9hnx 5005:5005 --address=0.0.0.0 +hadoop 65427 0.0 0.1 5052268 30860 s013 S 8:01PM 0:00.14 kubectl port-forward -n linkis pod/linkis-demo-ps-publicservice-788cb9674d-7fp7h 5004:5005 --address=0.0.0.0 +hadoop 65423 0.0 0.1 5064312 30428 s013 S 8:01PM 0:00.14 kubectl port-forward -n linkis pod/linkis-demo-ps-cs-6d976869d4-pjfts 5003:5005 --address=0.0.0.0 +hadoop 65421 0.0 0.1 5058912 29996 s013 S 8:01PM 0:00.14 kubectl port-forward -n linkis pod/linkis-demo-mg-gateway-7c4f5f7c98-xv9wd 5002:5005 --address=0.0.0.0 +hadoop 65419 0.0 0.1 5051780 30564 s013 S 8:01PM 0:00.13 kubectl port-forward -n linkis pod/linkis-demo-mg-eureka-0 5001:5005 --address=0.0.0.0 +hadoop 65417 0.0 0.1 5067128 29876 s013 S 8:01PM 0:00.11 kubectl port-forward -n linkis pod/linkis-demo-web-5585ffcddb-swsvh 8087:8087 --address=0.0.0.0 + +# 销毁所有 port-forward 实例 +$> sh ./scripts/remote-debug-proxy.sh stop +- stopping port-forward for [web] with mapping [local->8087:8087->pod] ... +- stopping port-forward for [mg-eureka] with mapping [local->5001:5005->pod] ... +- stopping port-forward for [mg-gateway] with mapping [local->5002:5005->pod] ... +- stopping port-forward for [ps-cs] with mapping [local->5003:5005->pod] ... +- stopping port-forward for [ps-publicservice] with mapping [local->5004:5005->pod] ... +- stopping port-forward for [ps-metadataquery] with mapping [local->5005:5005->pod] ... +- stopping port-forward for [ps-data-source-manager] with mapping [local->5006:5005->pod] ... +- stopping port-forward for [cg-linkismanager] with mapping [local->5007:5005->pod] ... +- stopping port-forward for [cg-entrance] with mapping [local->5008:5005->pod] ... 
+- stopping port-forward for [cg-engineconnmanager] with mapping [local->5009:5005->pod] ...
+- stopping port-forward for [cg-engineplugin] with mapping [local->5010:5005->pod] ...
+
+```
+
+## 登入 Linkis 服务的容器
+```shell
+# 进入 mg-gateway 容器,使用 linkis-cli 提交一个作业
+$> sh ./scripts/login-pod.sh mg-gateway
+```
+```shell
+# mg-gateway 容器内
+bash-4.2$ ./bin/linkis-cli -engineType shell-1 -codeType shell -code "echo \"hello\" " -submitUser hadoop -proxyUser hadoop
+
+=====Java Start Command=====
+exec /etc/alternatives/jre/bin/java -server -Xms32m -Xmx2048m -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/opt/linkis/logs/linkis-cli -XX:ErrorFile=/opt/linkis/logs/linkis-cli/ps_err_pid%p.log -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=80 -XX:+DisableExplicitGC -classpath /opt/linkis/conf/linkis-cli:/opt/linkis/lib/linkis-computation-governance/linkis-client/linkis-cli/*:/opt/linkis/lib/linkis-commons/public-module/*: -Dconf.root=/etc/linkis-conf -Dconf.file=linkis-cli.properties -Dlog.path=/opt/linkis/logs/linkis-cli -Dlog.file=linkis-client..log.20220814162421217892600 org.apache.linkis.cli.application.LinkisClientApplication '-engineType shell-1 -codeType shell -code echo "hello" -submitUser hadoop -proxyUser hadoop'
+OpenJDK 64-Bit Server VM warning: If the number of processors is expected to increase from one, then you should configure the number of parallel GC threads appropriately using -XX:ParallelGCThreads=N
+[INFO] LogFile path: /opt/linkis/logs/linkis-cli/linkis-client..log.20220814162421217892600
+[INFO] User does not provide usr-configuration file. Will use default config
+[INFO] connecting to linkis gateway:http://linkis-demo-mg-gateway.linkis.svc.cluster.local:9001
+JobId:1
+TaskId:1
+ExecId:exec_id018016linkis-cg-entrance10.244.0.13:9104LINKISCLI_hadoop_shell_0
+[INFO] Job is successfully submitted!
+
+2022-07-31 16:24:24.024 INFO Program is substituting variables for you
+2022-07-31 16:24:24.024 INFO Variables substitution ended successfully
+2022-07-31 16:24:24.024 WARN The code you submit will not be limited by the limit
+Job with jobId : 1 and execID : LINKISCLI_hadoop_shell_0 submitted
+2022-07-31 16:24:25.024 INFO You have submitted a new job, script code (after variable substitution) is
+************************************SCRIPT CODE************************************
+echo "hello"
+************************************SCRIPT CODE************************************
+2022-07-31 16:24:25.024 INFO Your job is accepted, jobID is LINKISCLI_hadoop_shell_0 and jobReqId is 1 in ServiceInstance(linkis-cg-entrance, 10.244.0.13:9104). Please wait it to be scheduled
+job is scheduled.
+2022-07-31 16:24:25.024 INFO Your job is Scheduled. Please wait it to run.
+Your job is being scheduled by orchestrator.
+2022-07-31 16:24:25.024 INFO job is running.
+2022-07-31 16:24:25.024 INFO Your job is Running now. Please wait it to complete.
+```
+
+## 销毁本地集群
+```shell
+# 选项 1: 仅删除 Helm Release
+$> helm delete --namespace linkis linkis-demo
+
+# 选项 2: 销毁整个 KinD 集群 (不需要先删除 Helm Release)
+$> kind delete cluster --name test-helm
+```
+
+## 使用 LDH 进行测试
+我们引入了一个新的镜像,叫做 LDH(Linkis 的 hadoop 一体式镜像),它提供了一个伪分布式的 hadoop 集群,方便快速测试 On Hadoop 的部署模式。
+这个镜像包含以下多个 hadoop 组件,LDH 中引擎的默认模式是 on-yarn 的。
+* Hadoop 2.7.2 , 包括 HDFS 和 YARN
+* Hive 2.3.3
+* Spark 2.4.3
+* Flink 1.12.2
+* ZooKeeper 3.5.9
+
+> 注意: LDH 中的 Hive 组件依赖一个外部的 MySQL 实例,请在部署 LDH 前先部署 MySQL 实例.
+
+请在项目根目录下运行如下 maven 命令,来构建 LDH 镜像 (当前仅支持 Linux 和 MacOS 系统)
+
+```shell
+$> ./mvnw clean install -Pdocker \
+   -Dmaven.javadoc.skip=true \
+   -Dmaven.test.skip=true \
+   -Dlinkis.build.web=true \
+   -Dlinkis.build.ldh=true \
+   -Dlinkis.build.with.jdbc=true
+```
+
+默认情况下,我们从 [Apache Archives](https://archive.apache.org/dist/) 这个官方站点下载每个 hadoop 组件的预建二进制发行版。
+由于网络的问题,这种方式对某些地区的成员来说可能会非常缓慢。如果你有更快的站点,你可以手动从这些站点下载相应的包,
+并将其移动到这个目录 `${HOME}/.linkis-build-cache` 来解决这个问题。
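+作为示意(非本补丁内容,镜像站地址为假设值),可以手动把预编译包放入构建缓存目录,后续构建会直接复用缓存:
+
+```shell
+# 手动下载 ZooKeeper 预编译包并放入构建缓存目录(URL 为假设的更快镜像站),
+# 文件名需与 prepare-ldh-image.sh 中的 TARFILENAME_* 变量保持一致。
+mkdir -p ${HOME}/.linkis-build-cache
+curl -L -o ${HOME}/.linkis-build-cache/apache-zookeeper-3.5.9-bin.tar.gz \
+  https://mirrors.example.com/apache/zookeeper/zookeeper-3.5.9/apache-zookeeper-3.5.9-bin.tar.gz
+```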
+
+运行如下的命令来创建一个本地 kubernetes 集群,并在其上部署 LDH 实例。
+
+```shell
+# 创建 KinD 集群,并部署 Linkis 和 LDH 实例
+$> sh ./scripts/create-kind-cluster.sh \
+   && sh ./scripts/install-mysql.sh \
+   && sh ./scripts/install-ldh.sh \
+   && sh ./scripts/install-charts-with-ldh.sh
+
+...
+
+# 快速体验 LDH
+$> kubectl exec -it -n ldh $(kubectl get pod -n ldh -o jsonpath='{.items[0].metadata.name}') -- bash
+
+[root@ldh-96bdc757c-dnkbs /]# hdfs dfs -ls /
+Found 4 items
+drwxrwxrwx - root supergroup 0 2022-07-31 02:48 /completed-jobs
+drwxrwxrwx - root supergroup 0 2022-07-31 02:48 /spark2-history
+drwxrwxrwx - root supergroup 0 2022-07-31 02:49 /tmp
+drwxrwxrwx - root supergroup 0 2022-07-31 02:48 /user
+
+[root@ldh-96bdc757c-dnkbs /]# beeline -u jdbc:hive2://ldh.ldh.svc.cluster.local:10000/ -n hadoop
+Connecting to jdbc:hive2://ldh.ldh.svc.cluster.local:10000/
+Connected to: Apache Hive (version 2.3.3)
+Driver: Hive JDBC (version 2.3.3)
+Transaction isolation: TRANSACTION_REPEATABLE_READ
+Beeline version 2.3.3 by Apache Hive
+0: jdbc:hive2://ldh.ldh.svc.cluster.local:100> create database demo;
+No rows affected (1.306 seconds)
+0: jdbc:hive2://ldh.ldh.svc.cluster.local:100> use demo;
+No rows affected (0.046 seconds)
+0: jdbc:hive2://ldh.ldh.svc.cluster.local:100> create table t1 (id int, data string);
+No rows affected (0.709 seconds)
+0: jdbc:hive2://ldh.ldh.svc.cluster.local:100> insert into t1 values(1, 'linikis demo');
+WARNING: Hive-on-MR is deprecated in Hive 2 and may not be available in the future versions. Consider using a different execution engine (i.e. spark, tez) or using Hive 1.X releases.
+No rows affected (5.491 seconds)
+0: jdbc:hive2://ldh.ldh.svc.cluster.local:100> select * from t1;
++--------+---------------+
+| t1.id | t1.data |
++--------+---------------+
+| 1 | linikis demo |
++--------+---------------+
+1 row selected (0.39 seconds)
+0: jdbc:hive2://ldh.ldh.svc.cluster.local:100> !q
+
+[root@ldh-96bdc757c-dnkbs /]# spark-sql
+22/07/31 02:53:18 INFO hive.metastore: Trying to connect to metastore with URI thrift://ldh.ldh.svc.cluster.local:9083
+22/07/31 02:53:18 INFO hive.metastore: Connected to metastore.
+...
+22/07/31 02:53:19 INFO spark.SparkContext: Running Spark version 2.4.3
+22/07/31 02:53:19 INFO spark.SparkContext: Submitted application: SparkSQL::10.244.0.6
+...
+22/07/31 02:53:27 INFO yarn.Client: Submitting application application_1659235712576_0001 to ResourceManager
+22/07/31 02:53:27 INFO impl.YarnClientImpl: Submitted application application_1659235712576_0001
+22/07/31 02:53:27 INFO cluster.SchedulerExtensionServices: Starting Yarn extension services with app application_1659235712576_0001 and attemptId None
+22/07/31 02:53:28 INFO yarn.Client: Application report for application_1659235712576_0001 (state: ACCEPTED)
+...
+22/07/31 02:53:36 INFO yarn.Client: Application report for application_1659235712576_0001 (state: RUNNING)
+...
+Spark master: yarn, Application Id: application_1659235712576_0001
+22/07/31 02:53:46 INFO thriftserver.SparkSQLCLIDriver: Spark master: yarn, Application Id: application_1659235712576_0001
+spark-sql> use demo;
+Time taken: 0.074 seconds
+22/07/31 02:58:02 INFO thriftserver.SparkSQLCLIDriver: Time taken: 0.074 seconds
+spark-sql> select * from t1;
+...
+1 linikis demo
+2 linkis demo spark sql
+Time taken: 3.352 seconds, Fetched 2 row(s)
+spark-sql> quit;
+
+[root@ldh-96bdc757c-dnkbs /]# zkCli.sh
+Connecting to localhost:2181
+Welcome to ZooKeeper!
+JLine support is enabled
+WATCHER::
+
+WatchedEvent state:SyncConnected type:None path:null
+
+[zk: localhost:2181(CONNECTED) 0] get -s /zookeeper/quota
+
+cZxid = 0x0
+ctime = Thu Jan 01 00:00:00 UTC 1970
+mZxid = 0x0
+mtime = Thu Jan 01 00:00:00 UTC 1970
+pZxid = 0x0
+cversion = 0
+dataVersion = 0
+aclVersion = 0
+ephemeralOwner = 0x0
+dataLength = 0
+numChildren = 0
+[zk: localhost:2181(CONNECTED) 1] quit
+
+# 以 per-job cluster 模式启动 Flink 作业
+[root@ldh-96bdc757c-dnkbs /]# HADOOP_CLASSPATH=`hadoop classpath` flink run -t yarn-per-job /opt/ldh/current/flink/examples/streaming/TopSpeedWindowing.jar
+# 以 session 模式启动 Flink 作业(LDH Pod 启动时已经启动了一个 Flink session)
+[root@ldh-96bdc757c-dnkbs /]# flink run /opt/ldh/current/flink/examples/streaming/TopSpeedWindowing.jar
+Executing TopSpeedWindowing example with default input data set.
+Use --input to specify file input.
+Printing result to stdout. Use --output to specify output path.
+...
+```
+
+你可以通过 `ldh.ldh.svc.cluster.local` 这个域名来访问 kubernetes 集群中的 LDH 服务,例如,从你的 pod 中访问 LDH 中的 hdfs。
+
+```shell
+[root@sample-pod /]# hdfs dfs -ls hdfs://ldh.ldh.svc.cluster.local:9000/
+Found 4 items
+drwxrwxrwx - root supergroup 0 2022-07-28 04:58 hdfs://ldh.ldh.svc.cluster.local:9000/completed-jobs
+drwxrwxrwx - root supergroup 0 2022-07-28 05:22 hdfs://ldh.ldh.svc.cluster.local:9000/spark2-history
+drwxrwxrwx - root supergroup 0 2022-07-28 04:58 hdfs://ldh.ldh.svc.cluster.local:9000/tmp
+drwxr-xr-x - root supergroup 0 2022-07-28 05:20 hdfs://ldh.ldh.svc.cluster.local:9000/user
+```
+
+最后,你可以用 `kubectl port-forward` 来访问 Linkis 的 Web 控制台。
diff --git a/linkis-dist/helm/charts/linkis/Chart.yaml b/linkis-dist/helm/charts/linkis/Chart.yaml
index a4bc4bb7f6c..82cf3d4c7e9 100644
--- a/linkis-dist/helm/charts/linkis/Chart.yaml
+++ b/linkis-dist/helm/charts/linkis/Chart.yaml
@@ -36,4 +36,4 @@ version: 0.1.0
 # incremented each time you make changes to the application. Versions are not expected to
 # follow Semantic Versioning. They should reflect the version the application is using.
 # It is recommended to use it with quotes.
-appVersion: "1.1.3"
+appVersion: "1.3.0"
diff --git a/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml
index ba0c369e60d..8b76c0ae6db 100644
--- a/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml
+++ b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml
@@ -992,7 +992,7 @@ data:
     SET @OPENLOOKENG_IDE=CONCAT('*-IDE,',@OPENLOOKENG_LABEL);
 
     -- Global Settings
-    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'ide', 'None', NULL, '0', '0', '1', '队列资源');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'default', 'None', NULL, '0', '0', '1', '队列资源');
     INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', '队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源');
     INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源');
     INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源');
diff --git a/linkis-dist/helm/charts/linkis/templates/configmap-linkis-config.yaml b/linkis-dist/helm/charts/linkis/templates/configmap-linkis-config.yaml
index 843bf3f615e..9775d915991 100644
--- a/linkis-dist/helm/charts/linkis/templates/configmap-linkis-config.yaml
+++ b/linkis-dist/helm/charts/linkis/templates/configmap-linkis-config.yaml
@@ -36,17 +36,26 @@ data:
   log4j2.xml: |
 
   linkis-env.sh: |
@@ -173,7 +182,7 @@ data:
     wds.linkis.home={{ .Values.linkis.locations.homeDir }}
     #Linkis governance station administrators
     wds.linkis.governance.station.admin=hadoop
-    wds.linkis.gateway.conf.publicservice.list=query,jobhistory,application,configuration,filesystem,udf,variable,microservice,errorcode,bml,datasource
+    #wds.linkis.gateway.conf.publicservice.list=query,jobhistory,application,configuration,filesystem,udf,variable,microservice,errorcode,bml,datasource
     spring.spring.servlet.multipart.max-file-size=500MB
     spring.spring.servlet.multipart.max-request-size=500MB
     # note: value of zero means Jetty will never write to disk.
@@ -257,12 +266,16 @@ data: spring.server.port={{ .Values.cgEntrance.port }} linkis-ps-publicservice.properties: | + wds.linkis.server.mdm.service.instance.expire-in-seconds=1800 + linkis.workspace.filesystem.auto.create=true ##restful - wds.linkis.server.restful.scan.packages=org.apache.linkis.jobhistory.restful,org.apache.linkis.variable.restful,org.apache.linkis.configuration.restful,org.apache.linkis.udf.api,org.apache.linkis.filesystem.restful,org.apache.linkis.filesystem.restful,org.apache.linkis.instance.label.restful,org.apache.linkis.metadata.restful.api,org.apache.linkis.cs.server.restful,org.apache.linkis.bml.restful,org.apache.linkis.errorcode.server.restful + wds.linkis.server.restful.scan.packages=org.apache.linkis.cs.server.restful,org.apache.linkis.datasourcemanager.core.restful,org.apache.linkis.metadata.query.server.restful,org.apache.linkis.jobhistory.restful,org.apache.linkis.variable.restful,org.apache.linkis.configuration.restful,org.apache.linkis.udf.api,org.apache.linkis.filesystem.restful,org.apache.linkis.filesystem.restful,org.apache.linkis.instance.label.restful,org.apache.linkis.metadata.restful.api,org.apache.linkis.cs.server.restful,org.apache.linkis.bml.restful,org.apache.linkis.errorcode.server.restful + ##mybatis - wds.linkis.server.mybatis.mapperLocations=classpath:org/apache/linkis/jobhistory/dao/impl/*.xml,classpath:org/apache/linkis/variable/dao/impl/*.xml,classpath:org/apache/linkis/configuration/dao/impl/*.xml,classpath:org/apache/linkis/udf/dao/impl/*.xml,classpath:org/apache/linkis/instance/label/dao/impl/*.xml,classpath:org/apache/linkis/metadata/hive/dao/impl/*.xml,org/apache/linkis/metadata/dao/impl/*.xml,classpath:org/apache/linkis/bml/dao/impl/*.xml - wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.configuration.entity,org.apache.linkis.jobhistory.entity,org.apache.linkis.udf.entity,org.apache.linkis.variable.entity,org.apache.linkis.instance.label.entity,org.apache.linkis.manager.entity,org.apache.linkis.metadata.domain,org.apache.linkis.bml.Entity - wds.linkis.server.mybatis.BasePackage=org.apache.linkis.jobhistory.dao,org.apache.linkis.variable.dao,org.apache.linkis.configuration.dao,org.apache.linkis.udf.dao,org.apache.linkis.instance.label.dao,org.apache.linkis.metadata.hive.dao,org.apache.linkis.metadata.dao,org.apache.linkis.bml.dao,org.apache.linkis.errorcode.server.dao,org.apache.linkis.publicservice.common.lock.dao + wds.linkis.server.mybatis.mapperLocations=classpath*:org/apache/linkis/cs/persistence/dao/impl/*.xml,classpath:org/apache/linkis/datasourcemanager/core/dao/mapper/*.xml,classpath:org/apache/linkis/jobhistory/dao/impl/*.xml,classpath:org/apache/linkis/variable/dao/impl/*.xml,classpath:org/apache/linkis/configuration/dao/impl/*.xml,classpath:org/apache/linkis/udf/dao/impl/*.xml,classpath:org/apache/linkis/instance/label/dao/impl/*.xml,classpath:org/apache/linkis/metadata/hive/dao/impl/*.xml,org/apache/linkis/metadata/dao/impl/*.xml,classpath:org/apache/linkis/bml/dao/impl/*.xml + wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.cs.persistence.entity,org.apache.linkis.datasourcemanager.common.domain,org.apache.linkis.datasourcemanager.core.vo,org.apache.linkis.configuration.entity,org.apache.linkis.jobhistory.entity,org.apache.linkis.udf.entity,org.apache.linkis.variable.entity,org.apache.linkis.instance.label.entity,org.apache.linkis.manager.entity,org.apache.linkis.metadata.domain,org.apache.linkis.bml.entity + 
     wds.linkis.server.mybatis.BasePackage=org.apache.linkis.cs.persistence.dao,org.apache.linkis.datasourcemanager.core.dao,org.apache.linkis.jobhistory.dao,org.apache.linkis.variable.dao,org.apache.linkis.configuration.dao,org.apache.linkis.udf.dao,org.apache.linkis.instance.label.dao,org.apache.linkis.metadata.hive.dao,org.apache.linkis.metadata.dao,org.apache.linkis.bml.dao,org.apache.linkis.errorcode.server.dao,org.apache.linkis.publicservice.common.lock.dao
+
     ##workspace
     wds.linkis.workspace.filesystem.hdfsuserrootpath.suffix=/
     wds.linkis.server.component.exclude.classes=org.apache.linkis.entranceclient.conf.ClientForEntranceSpringConfiguration,org.apache.linkis.entranceclient.conf.ClientSpringConfiguration,org.apache.linkis.entrance.conf.EntranceSpringConfiguration
@@ -271,51 +284,20 @@
     hive.meta.url={{ .Values.linkis.deps.hive.meta.url }}
     hive.meta.user={{ .Values.linkis.deps.hive.meta.user }}
     hive.meta.password={{ .Values.linkis.deps.hive.meta.password }}
+    wds.linkis.metadata.hive.encode.enabled=false
     # associated with the logged-in user when querying metadata: default value is true
-    #linkis.metadata.hive.permission.with-login-user-enabled
-    ##Spring
-    spring.server.port={{ .Values.psPublicService.port }}
-    spring.spring.main.allow-bean-definition-overriding=true
-
-  linkis-ps-cs.properties: |
-    ##restful
-    wds.linkis.server.restful.scan.packages=org.apache.linkis.cs.server.restful
-    ##mybatis
-    wds.linkis.server.mybatis.mapperLocations=classpath*:org/apache/linkis/cs/persistence/dao/impl/*.xml
-    wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.cs.persistence.entity
-    wds.linkis.server.mybatis.BasePackage=org.apache.linkis.cs.persistence.dao
-    ##Spring
-    spring.server.port={{ .Values.psCs.port }}
-    # ps-cs prefix must be started with 'cs_'
-    spring.eureka.instance.metadata-map.route=cs_1_dev
+    #wds.linkis.jobhistory.undone.job.minimum.id=0
+    #wds.linkis.jobhistory.undone.job.refreshtime.daily=00:15
     wds.linkis.cs.deserialize.replace_package_header.enable=false
 
-  linkis-ps-data-source-manager.properties: |
-    ##restful
-    wds.linkis.server.restful.scan.packages=org.apache.linkis.datasourcemanager.core.restful
-    ##mybatis
-    wds.linkis.server.mybatis.mapperLocations=classpath:org/apache/linkis/datasourcemanager/core/dao/mapper/*.xml
-    wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.datasourcemanager.common.domain,org.apache.linkis.datasourcemanager.core.vo
-    wds.linkis.server.mybatis.BasePackage=org.apache.linkis.datasourcemanager.core.dao
-
-    ##hive meta
-    wds.linkis.metadata.hive.encode.enabled=false
-    hive.meta.url={{ .Values.linkis.deps.hive.meta.url }}
-    hive.meta.user={{ .Values.linkis.deps.hive.meta.user }}
-    hive.meta.password={{ .Values.linkis.deps.hive.meta.password }}
+    wds.linkis.rpc.conf.enable.local.message=true
+    wds.linkis.rpc.conf.local.app.list=linkis-ps-publicservice
     ##Spring
-    spring.server.port={{ .Values.psDataSourceManager.port }}
+    spring.server.port={{ .Values.psPublicService.port }}
     spring.spring.main.allow-bean-definition-overriding=true
     spring.spring.jackson.serialization.FAIL_ON_EMPTY_BEANS=false
-    spring.jackson.serialization.FAIL_ON_EMPTY_BEANS=false
-
-  linkis-ps-metadataquery.properties: |
-    wds.linkis.server.mdm.service.instance.expire-in-seconds=1800
-    wds.linkis.server.restful.scan.packages=org.apache.linkis.metadatamanager.server.restful
-    wds.linkis.server.dsm.app.name=linkis-ps-data-source-manager
-
-    ##Spring
-    spring.server.port={{ .Values.psMetadataQuery.port }}
+    # ps-cs prefix must start with 'cs_'
+
spring.eureka.instance.metadata-map.route=cs_1_dev diff --git a/linkis-dist/helm/charts/linkis/templates/linkis-cg-engineconnmanager.yaml b/linkis-dist/helm/charts/linkis/templates/linkis-cg-engineconnmanager.yaml index b04b65d8e88..b87d0eb2e16 100644 --- a/linkis-dist/helm/charts/linkis/templates/linkis-cg-engineconnmanager.yaml +++ b/linkis-dist/helm/charts/linkis/templates/linkis-cg-engineconnmanager.yaml @@ -59,7 +59,7 @@ spec: publishNotReadyAddresses: true --- apiVersion: apps/v1 -kind: Deployment +kind: StatefulSet metadata: name: {{ include "linkis.fullname" . }}-cg-engineconnmanager labels: @@ -71,22 +71,50 @@ metadata: {{- toYaml .Values.cgEngineConnManager.annotations | nindent 4 }} {{- end }} spec: - replicas: {{ .Values.cgEngineConnManager.replicas }} + serviceName: {{ include "linkis.fullname" . }}-cg-engineconnmanager-headless selector: matchLabels: {{- include "linkis.cgEngineConnManager.selectorLabels" . | nindent 6 }} app: {{ include "linkis.fullname" . }}-cg-engineconnmanager + replicas: {{ .Values.cgEngineConnManager.replicas }} + podManagementPolicy: {{ .Values.cgEngineConnManager.podManagementPolicy }} + updateStrategy: + type: {{ .Values.cgEngineConnManager.updateStrategy }} template: metadata: - {{- with .Values.cgEngineConnManager.annotations }} - annotations: - {{- toYaml . | nindent 8 }} - {{- end }} labels: app: {{ include "linkis.fullname" . }}-cg-engineconnmanager version: {{ .Chart.AppVersion }} {{- include "linkis.cgEngineConnManager.selectorLabels" . | nindent 8 }} + {{- with .Values.cgEngineConnManager.annotations }} + annotations: + {{- toYaml . | nindent 8 }} + {{- end }} spec: + {{- if .Values.schedulerName }} + schedulerName: "{{ .Values.schedulerName }}" + {{- end }} + {{- if .Values.podSecurityContext }} + securityContext: +{{ toYaml .Values.podSecurityContext | indent 8 }} + {{- end }} + {{- if or .Values.serviceAccount.create .Values.serviceAccount.name }} + serviceAccountName: "{{ template "linkis.serviceAccountName" . }}" + {{- end }} + automountServiceAccountToken: {{ .Values.serviceAccount.automountToken }} + terminationGracePeriodSeconds: {{ .Values.mgEureka.terminationGracePeriod }} + {{- with .Values.cgEngineConnManager.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.cgEngineConnManager.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.cgEngineConnManager.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} subdomain: {{ include "linkis.fullname" . }}-cg-engineconnmanager-headless {{- with .Values.imagePullSecrets }} imagePullSecrets: @@ -98,6 +126,10 @@ spec: name: {{ include "linkis.fullname" . }}-linkis-config - name: log emptyDir: {} + - name: common + hostPath: + path: {{ .Values.linkis.locations.commonDir }} + type: DirectoryOrCreate - name: runtime {{- if .Values.linkis.featureGates.localMode }} hostPath: @@ -106,9 +138,24 @@ spec: {{- else }} emptyDir: {} {{- end }} - serviceAccountName: {{ include "linkis.serviceAccountName" . 
}} - securityContext: - {{- toYaml .Values.cgEngineConnManager.podSecurityContext | nindent 8 }} + + {{- if eq .Values.linkis.featureGates.localMode false }} + - name: hadoop-conf + projected: + sources: + - configMap: + name: {{ .Values.linkis.deps.hadoop.configMapName }} + - name: hive-conf + projected: + sources: + - configMap: + name: {{ .Values.linkis.deps.hive.configMapName }} + - name: spark-conf + projected: + sources: + - configMap: + name: {{ .Values.linkis.deps.spark.configMapName }} + {{- end }} containers: - name: "engineconnmanager" securityContext: @@ -119,7 +166,7 @@ spec: - /bin/bash - -ecx - >- - RUN_IN_FOREGROUND=true {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start cg-engineconnmanager + RUN_IN_FOREGROUND=true LINKIS_EXTENDED_LIB={{.Values.linkis.locations.commonDir}}/extendlib {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start cg-engineconnmanager ports: - name: "http" containerPort: {{ .Values.cgEngineConnManager.port }} @@ -142,6 +189,14 @@ spec: - name: DEBUG_PORT value: "5005" {{- end }} + {{- if eq .Values.linkis.featureGates.localMode false }} + - name: HADOOP_CONF_DIR + value: /etc/hadoop-conf + - name: HIVE_CONF_DIR + value: /etc/hive-conf + - name: SPARK_CONF_DIR + value: /etc/spark-conf + {{- end }} - name: SERVER_HEAP_SIZE value: {{ .Values.cgEngineConnManager.jvmHeapSize }} - name: EUREKA_URL @@ -162,23 +217,21 @@ spec: {{ toYaml .Values.cgEngineConnManager.envs.froms | indent 12 }} {{- end }} volumeMounts: + {{- if eq .Values.linkis.featureGates.localMode false }} + - name: hadoop-conf + mountPath: /etc/hadoop-conf + - name: hive-conf + mountPath: /etc/hive-conf + - name: spark-conf + mountPath: /etc/spark-conf + {{- end }} - name: conf mountPath: {{ .Values.linkis.locations.confDir }} - name: log mountPath: {{ .Values.linkis.locations.logDir }} - name: runtime mountPath: {{ .Values.linkis.locations.runtimeDir }} + - name: common + mountPath: {{ .Values.linkis.locations.commonDir }} resources: {{- toYaml .Values.cgEngineConnManager.resources | nindent 12 }} - {{- with .Values.cgEngineConnManager.nodeSelector }} - nodeSelector: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.cgEngineConnManager.affinity }} - affinity: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.cgEngineConnManager.tolerations }} - tolerations: - {{- toYaml . | nindent 8 }} - {{- end }} diff --git a/linkis-dist/helm/charts/linkis/templates/linkis-cg-engineplugin.yaml b/linkis-dist/helm/charts/linkis/templates/linkis-cg-engineplugin.yaml index 8e2177dce26..130745f1efb 100644 --- a/linkis-dist/helm/charts/linkis/templates/linkis-cg-engineplugin.yaml +++ b/linkis-dist/helm/charts/linkis/templates/linkis-cg-engineplugin.yaml @@ -98,6 +98,10 @@ spec: name: {{ include "linkis.fullname" . }}-linkis-config - name: log emptyDir: {} + - name: common + hostPath: + path: {{ .Values.linkis.locations.commonDir }} + type: DirectoryOrCreate - name: runtime {{- if .Values.linkis.featureGates.localMode }} hostPath: @@ -106,6 +110,23 @@ spec: {{- else }} emptyDir: {} {{- end }} + {{- if eq .Values.linkis.featureGates.localMode false }} + - name: hadoop-conf + projected: + sources: + - configMap: + name: {{ .Values.linkis.deps.hadoop.configMapName }} + - name: hive-conf + projected: + sources: + - configMap: + name: {{ .Values.linkis.deps.hive.configMapName }} + - name: spark-conf + projected: + sources: + - configMap: + name: {{ .Values.linkis.deps.spark.configMapName }} + {{- end }} serviceAccountName: {{ include "linkis.serviceAccountName" . 
}} securityContext: {{- toYaml .Values.cgEnginePlugin.podSecurityContext | nindent 8 }} @@ -119,7 +140,7 @@ spec: - /bin/bash - -ecx - >- - RUN_IN_FOREGROUND=true {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start cg-engineplugin + RUN_IN_FOREGROUND=true LINKIS_EXTENDED_LIB={{.Values.linkis.locations.commonDir}}/extendlib {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start cg-engineplugin ports: - name: "http" containerPort: {{ .Values.cgEnginePlugin.port }} @@ -142,6 +163,14 @@ spec: - name: DEBUG_PORT value: "5005" {{- end }} + {{- if eq .Values.linkis.featureGates.localMode false }} + - name: HADOOP_CONF_DIR + value: /etc/hadoop-conf + - name: HIVE_CONF_DIR + value: /etc/hive-conf + - name: SPARK_CONF_DIR + value: /etc/spark-conf + {{- end }} - name: SERVER_HEAP_SIZE value: {{ .Values.cgEnginePlugin.jvmHeapSize }} - name: EUREKA_URL @@ -162,12 +191,22 @@ spec: {{ toYaml .Values.cgEnginePlugin.envs.froms | indent 12 }} {{- end }} volumeMounts: + {{- if eq .Values.linkis.featureGates.localMode false }} + - name: hadoop-conf + mountPath: /etc/hadoop-conf + - name: hive-conf + mountPath: /etc/hive-conf + - name: spark-conf + mountPath: /etc/spark-conf + {{- end }} - name: conf mountPath: {{ .Values.linkis.locations.confDir }} - name: log mountPath: {{ .Values.linkis.locations.logDir }} - name: runtime mountPath: {{ .Values.linkis.locations.runtimeDir }} + - name: common + mountPath: {{ .Values.linkis.locations.commonDir }} resources: {{- toYaml .Values.cgEnginePlugin.resources | nindent 12 }} {{- with .Values.cgEnginePlugin.nodeSelector }} diff --git a/linkis-dist/helm/charts/linkis/templates/linkis-cg-entrance.yaml b/linkis-dist/helm/charts/linkis/templates/linkis-cg-entrance.yaml index 727a37d7215..426fb105dc9 100644 --- a/linkis-dist/helm/charts/linkis/templates/linkis-cg-entrance.yaml +++ b/linkis-dist/helm/charts/linkis/templates/linkis-cg-entrance.yaml @@ -98,6 +98,10 @@ spec: name: {{ include "linkis.fullname" . }}-linkis-config - name: log emptyDir: {} + - name: common + hostPath: + path: {{ .Values.linkis.locations.commonDir }} + type: DirectoryOrCreate - name: runtime {{- if .Values.linkis.featureGates.localMode }} hostPath: @@ -106,6 +110,13 @@ spec: {{- else }} emptyDir: {} {{- end }} + {{- if eq .Values.linkis.featureGates.localMode false }} + - name: hadoop-conf + projected: + sources: + - configMap: + name: {{ .Values.linkis.deps.hadoop.configMapName }} + {{- end }} serviceAccountName: {{ include "linkis.serviceAccountName" . 
}} securityContext: {{- toYaml .Values.cgEntrance.podSecurityContext | nindent 8 }} @@ -119,7 +130,7 @@ spec: - /bin/bash - -ecx - >- - RUN_IN_FOREGROUND=true {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start cg-entrance + RUN_IN_FOREGROUND=true LINKIS_EXTENDED_LIB={{.Values.linkis.locations.commonDir}}/extendlib {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start cg-entrance ports: - name: "http" containerPort: {{ .Values.cgEntrance.port }} @@ -142,6 +153,10 @@ spec: - name: DEBUG_PORT value: "5005" {{- end }} + {{- if eq .Values.linkis.featureGates.localMode false }} + - name: HADOOP_CONF_DIR + value: /etc/hadoop-conf + {{- end }} - name: SERVER_HEAP_SIZE value: {{ .Values.cgEntrance.jvmHeapSize }} - name: EUREKA_URL @@ -162,12 +177,18 @@ spec: {{ toYaml .Values.cgEntrance.envs.froms | indent 12 }} {{- end }} volumeMounts: + {{- if eq .Values.linkis.featureGates.localMode false }} + - name: hadoop-conf + mountPath: /etc/hadoop-conf + {{- end }} - name: conf mountPath: {{ .Values.linkis.locations.confDir }} - name: log mountPath: {{ .Values.linkis.locations.logDir }} - name: runtime mountPath: {{ .Values.linkis.locations.runtimeDir }} + - name: common + mountPath: {{ .Values.linkis.locations.commonDir }} resources: {{- toYaml .Values.cgEntrance.resources | nindent 12 }} {{- with .Values.cgEntrance.nodeSelector }} diff --git a/linkis-dist/helm/charts/linkis/templates/linkis-cg-linkismanager.yaml b/linkis-dist/helm/charts/linkis/templates/linkis-cg-linkismanager.yaml index ad429ce0649..2167ae8976d 100644 --- a/linkis-dist/helm/charts/linkis/templates/linkis-cg-linkismanager.yaml +++ b/linkis-dist/helm/charts/linkis/templates/linkis-cg-linkismanager.yaml @@ -98,6 +98,10 @@ spec: name: {{ include "linkis.fullname" . }}-linkis-config - name: log emptyDir: {} + - name: common + hostPath: + path: {{ .Values.linkis.locations.commonDir }} + type: DirectoryOrCreate - name: runtime {{- if .Values.linkis.featureGates.localMode }} hostPath: @@ -119,7 +123,7 @@ spec: - /bin/bash - -ecx - >- - RUN_IN_FOREGROUND=true {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start cg-linkismanager + RUN_IN_FOREGROUND=true LINKIS_EXTENDED_LIB={{.Values.linkis.locations.commonDir}}/extendlib {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start cg-linkismanager ports: - name: "http" containerPort: {{ .Values.cgLinkisManager.port }} @@ -168,6 +172,8 @@ spec: mountPath: {{ .Values.linkis.locations.logDir }} - name: runtime mountPath: {{ .Values.linkis.locations.runtimeDir }} + - name: common + mountPath: {{ .Values.linkis.locations.commonDir }} resources: {{- toYaml .Values.cgLinkisManager.resources | nindent 12 }} {{- with .Values.cgLinkisManager.nodeSelector }} diff --git a/linkis-dist/helm/charts/linkis/templates/linkis-mg-eureka.yaml b/linkis-dist/helm/charts/linkis/templates/linkis-mg-eureka.yaml index f57be433944..301bcec0afc 100644 --- a/linkis-dist/helm/charts/linkis/templates/linkis-mg-eureka.yaml +++ b/linkis-dist/helm/charts/linkis/templates/linkis-mg-eureka.yaml @@ -100,6 +100,10 @@ spec: name: {{ include "linkis.fullname" . 
}}-linkis-config - name: log emptyDir: {} + - name: common + hostPath: + path: {{ .Values.linkis.locations.commonDir }} + type: DirectoryOrCreate {{- if .Values.imagePullSecrets }} imagePullSecrets: {{ toYaml .Values.imagePullSecrets | indent 8 }} @@ -116,7 +120,7 @@ spec: - /bin/bash - -ecx - >- - RUN_IN_FOREGROUND=true {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start mg-eureka + RUN_IN_FOREGROUND=true LINKIS_EXTENDED_LIB={{.Values.linkis.locations.commonDir}}/extendlib {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start mg-eureka ports: - name: "http" containerPort: {{ .Values.mgEureka.port }} @@ -166,3 +170,5 @@ spec: mountPath: {{ .Values.linkis.locations.confDir }} - name: log mountPath: {{ .Values.linkis.locations.logDir }} + - name: common + mountPath: {{ .Values.linkis.locations.commonDir }} diff --git a/linkis-dist/helm/charts/linkis/templates/linkis-mg-gateway.yaml b/linkis-dist/helm/charts/linkis/templates/linkis-mg-gateway.yaml index 2b69291f5e5..06d3a8c1f95 100644 --- a/linkis-dist/helm/charts/linkis/templates/linkis-mg-gateway.yaml +++ b/linkis-dist/helm/charts/linkis/templates/linkis-mg-gateway.yaml @@ -98,6 +98,10 @@ spec: name: {{ include "linkis.fullname" . }}-linkis-config - name: log emptyDir: {} + - name: common + hostPath: + path: {{ .Values.linkis.locations.commonDir }} + type: DirectoryOrCreate - name: runtime {{- if .Values.linkis.featureGates.localMode }} hostPath: @@ -119,7 +123,7 @@ spec: - /bin/bash - -ecx - >- - RUN_IN_FOREGROUND=true {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start mg-gateway + RUN_IN_FOREGROUND=true LINKIS_EXTENDED_LIB={{.Values.linkis.locations.commonDir}}/extendlib {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start mg-gateway ports: - name: "http" containerPort: {{ .Values.mgGateway.port }} @@ -168,6 +172,8 @@ spec: mountPath: {{ .Values.linkis.locations.logDir }} - name: runtime mountPath: {{ .Values.linkis.locations.runtimeDir }} + - name: common + mountPath: {{ .Values.linkis.locations.commonDir }} resources: {{- toYaml .Values.mgGateway.resources | nindent 12 }} {{- with .Values.mgGateway.nodeSelector }} diff --git a/linkis-dist/helm/charts/linkis/templates/linkis-ps-cs.yaml b/linkis-dist/helm/charts/linkis/templates/linkis-ps-cs.yaml deleted file mode 100644 index 994d3c022fc..00000000000 --- a/linkis-dist/helm/charts/linkis/templates/linkis-ps-cs.yaml +++ /dev/null @@ -1,184 +0,0 @@ ---- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: v1 -kind: Service -metadata: - name: {{ include "linkis.fullname" . }}-ps-cs - labels: - app: {{ include "linkis.fullname" . }}-ps-cs - {{- include "linkis.psCs.labels" . 
| nindent 4 }} - annotations: - prometheus.io/path: {{ .Values.psCs.prometheus.metricsPath }} - prometheus.io/port: '{{ .Values.psCs.port }}' - prometheus.io/scrape: 'true' -spec: - ports: - - name: "http" - protocol: TCP - port: {{ .Values.psCs.port }} - selector: - {{- include "linkis.psCs.selectorLabels" . | nindent 4 }} - app: {{ include "linkis.fullname" . }}-ps-cs - {{- toYaml .Values.svc | nindent 2 }} ---- -apiVersion: v1 -kind: Service -metadata: - name: {{ include "linkis.fullname" . }}-ps-cs-headless - labels: - app: {{ include "linkis.fullname" . }}-ps-cs - {{- include "linkis.psCs.labels" . | nindent 4 }} - annotations: - prometheus.io/path: {{ .Values.psCs.prometheus.metricsPath }} - prometheus.io/port: '{{ .Values.psCs.port }}' - prometheus.io/scrape: 'true' -spec: - ports: - - name: "http" - protocol: TCP - port: {{ .Values.psCs.port }} - selector: - {{- include "linkis.psCs.selectorLabels" . | nindent 4 }} - app: {{ include "linkis.fullname" . }}-ps-cs - clusterIP: None - type: ClusterIP - publishNotReadyAddresses: true ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ include "linkis.fullname" . }}-ps-cs - labels: - app: {{ include "linkis.fullname" . }}-ps-cs - version: {{ .Chart.AppVersion }} - {{- include "linkis.psCs.labels" . | nindent 4 }} - {{- if .Values.psCs.annotations }} - annotations: - {{- toYaml .Values.psCs.annotations | nindent 4 }} - {{- end }} -spec: - replicas: {{ .Values.psCs.replicas }} - selector: - matchLabels: - {{- include "linkis.psCs.selectorLabels" . | nindent 6 }} - app: {{ include "linkis.fullname" . }}-ps-cs - template: - metadata: - {{- with .Values.psCs.annotations }} - annotations: - {{- toYaml . | nindent 8 }} - {{- end }} - labels: - app: {{ include "linkis.fullname" . }}-ps-cs - version: {{ .Chart.AppVersion }} - {{- include "linkis.psCs.selectorLabels" . | nindent 8 }} - spec: - subdomain: {{ include "linkis.fullname" . }}-ps-cs-headless - {{- with .Values.imagePullSecrets }} - imagePullSecrets: - {{- toYaml . | nindent 8 }} - {{- end }} - volumes: - - name: conf - configMap: - name: {{ include "linkis.fullname" . }}-linkis-config - - name: log - emptyDir: {} - - name: runtime - {{- if .Values.linkis.featureGates.localMode }} - hostPath: - path: {{ .Values.linkis.locations.hostPath }} - type: DirectoryOrCreate - {{- else }} - emptyDir: {} - {{- end }} - serviceAccountName: {{ include "linkis.serviceAccountName" . }} - securityContext: - {{- toYaml .Values.psCs.podSecurityContext | nindent 8 }} - containers: - - name: "cs" - securityContext: - {{- toYaml .Values.psCs.securityContext | nindent 12 }} - image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" - imagePullPolicy: {{ .Values.image.pullPolicy }} - command: - - /bin/bash - - -ecx - - >- - RUN_IN_FOREGROUND=true {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start ps-cs - ports: - - name: "http" - containerPort: {{ .Values.psCs.port }} - protocol: TCP - {{- if .Values.linkis.featureGates.enableJvmRemoteDebug }} - - name: "debug" - containerPort: 5005 - protocol: TCP - {{- end }} - # TODO: replace with httpGet when spring-boot readiness probe is implemented. 
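# A hedged sketch of what that TODO could look like once a Spring Boot
# readiness endpoint is exposed; the /actuator/health path and the timing
# values below are assumptions for illustration, not part of this patch:
#
#   readinessProbe:
#     httpGet:
#       path: /actuator/health
#       port: http
#     initialDelaySeconds: 15
#     periodSeconds: 5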
- readinessProbe: - initialDelaySeconds: 15 - periodSeconds: 5 - timeoutSeconds: 20 - failureThreshold: 10 - tcpSocket: - port: {{ .Values.psCs.port }} - env: - {{- if .Values.linkis.featureGates.enableJvmRemoteDebug }} - - name: DEBUG_PORT - value: "5005" - {{- end }} - - name: SERVER_HEAP_SIZE - value: {{ .Values.psCs.jvmHeapSize }} - - name: EUREKA_URL - value: {{- include "linkis.registration.url" . | quote | indent 1 }} - - name: EUREKA_PREFER_IP - value: "true" - - name: EUREKA_PORT - value: "{{ .Values.mgEureka.port }}" - - name: POD_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - {{- if .Values.psCs.envs.extras }} -{{ toYaml .Values.psCs.envs.extras | indent 12 }} - {{- end }} - {{- if .Values.psCs.envs.froms }} - envFrom: -{{ toYaml .Values.psCs.envs.froms | indent 12 }} - {{- end }} - volumeMounts: - - name: conf - mountPath: {{ .Values.linkis.locations.confDir }} - - name: log - mountPath: {{ .Values.linkis.locations.logDir }} - - name: runtime - mountPath: {{ .Values.linkis.locations.runtimeDir }} - resources: - {{- toYaml .Values.psCs.resources | nindent 12 }} - {{- with .Values.psCs.nodeSelector }} - nodeSelector: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.psCs.affinity }} - affinity: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.psCs.tolerations }} - tolerations: - {{- toYaml . | nindent 8 }} - {{- end }} diff --git a/linkis-dist/helm/charts/linkis/templates/linkis-ps-data-source-manager.yaml b/linkis-dist/helm/charts/linkis/templates/linkis-ps-data-source-manager.yaml deleted file mode 100644 index 0e3c6469e04..00000000000 --- a/linkis-dist/helm/charts/linkis/templates/linkis-ps-data-source-manager.yaml +++ /dev/null @@ -1,184 +0,0 @@ ---- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: v1 -kind: Service -metadata: - name: {{ include "linkis.fullname" . }}-ps-data-source-manager - labels: - app: {{ include "linkis.fullname" . }}-ps-data-source-manager - {{- include "linkis.psDataSourceManager.labels" . | nindent 4 }} - annotations: - prometheus.io/path: {{ .Values.psDataSourceManager.prometheus.metricsPath }} - prometheus.io/port: '{{ .Values.psDataSourceManager.port }}' - prometheus.io/scrape: 'true' -spec: - ports: - - name: "http" - protocol: TCP - port: {{ .Values.psDataSourceManager.port }} - selector: - {{- include "linkis.psDataSourceManager.selectorLabels" . | nindent 4 }} - app: {{ include "linkis.fullname" . }}-ps-data-source-manager - {{- toYaml .Values.svc | nindent 2 }} ---- -apiVersion: v1 -kind: Service -metadata: - name: {{ include "linkis.fullname" . }}-ps-data-source-manager-headless - labels: - app: {{ include "linkis.fullname" . }}-ps-data-source-manager - {{- include "linkis.psDataSourceManager.labels" . 
| nindent 4 }} - annotations: - prometheus.io/path: {{ .Values.psDataSourceManager.prometheus.metricsPath }} - prometheus.io/port: '{{ .Values.psDataSourceManager.port }}' - prometheus.io/scrape: 'true' -spec: - ports: - - name: "http" - protocol: TCP - port: {{ .Values.psDataSourceManager.port }} - selector: - {{- include "linkis.psDataSourceManager.selectorLabels" . | nindent 4 }} - app: {{ include "linkis.fullname" . }}-ps-data-source-manager - clusterIP: None - type: ClusterIP - publishNotReadyAddresses: true ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ include "linkis.fullname" . }}-ps-data-source-manager - labels: - app: {{ include "linkis.fullname" . }}-ps-data-source-manager - version: {{ .Chart.AppVersion }} - {{- include "linkis.psDataSourceManager.labels" . | nindent 4 }} - {{- if .Values.psDataSourceManager.annotations }} - annotations: - {{- toYaml .Values.psDataSourceManager.annotations | nindent 4 }} - {{- end }} -spec: - replicas: {{ .Values.psDataSourceManager.replicas }} - selector: - matchLabels: - {{- include "linkis.psDataSourceManager.selectorLabels" . | nindent 6 }} - app: {{ include "linkis.fullname" . }}-ps-data-source-manager - template: - metadata: - {{- with .Values.psDataSourceManager.annotations }} - annotations: - {{- toYaml . | nindent 8 }} - {{- end }} - labels: - app: {{ include "linkis.fullname" . }}-ps-data-source-manager - version: {{ .Chart.AppVersion }} - {{- include "linkis.psDataSourceManager.selectorLabels" . | nindent 8 }} - spec: - subdomain: {{ include "linkis.fullname" . }}-ps-data-source-manager-headless - {{- with .Values.imagePullSecrets }} - imagePullSecrets: - {{- toYaml . | nindent 8 }} - {{- end }} - volumes: - - name: conf - configMap: - name: {{ include "linkis.fullname" . }}-linkis-config - - name: log - emptyDir: {} - - name: runtime - {{- if .Values.linkis.featureGates.localMode }} - hostPath: - path: {{ .Values.linkis.locations.hostPath }} - type: DirectoryOrCreate - {{- else }} - emptyDir: {} - {{- end }} - serviceAccountName: {{ include "linkis.serviceAccountName" . }} - securityContext: - {{- toYaml .Values.psDataSourceManager.podSecurityContext | nindent 8 }} - containers: - - name: "data-source-manager" - securityContext: - {{- toYaml .Values.psDataSourceManager.securityContext | nindent 12 }} - image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" - imagePullPolicy: {{ .Values.image.pullPolicy }} - command: - - /bin/bash - - -ecx - - >- - RUN_IN_FOREGROUND=true {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start ps-data-source-manager - ports: - - name: "http" - containerPort: {{ .Values.psDataSourceManager.port }} - protocol: TCP - {{- if .Values.linkis.featureGates.enableJvmRemoteDebug }} - - name: "debug" - containerPort: 5005 - protocol: TCP - {{- end }} - # TODO: replace with httpGet when spring-boot readiness probe is implemented. - readinessProbe: - initialDelaySeconds: 15 - periodSeconds: 5 - timeoutSeconds: 20 - failureThreshold: 10 - tcpSocket: - port: {{ .Values.psDataSourceManager.port }} - env: - {{- if .Values.linkis.featureGates.enableJvmRemoteDebug }} - - name: DEBUG_PORT - value: "5005" - {{- end }} - - name: SERVER_HEAP_SIZE - value: {{ .Values.psDataSourceManager.jvmHeapSize }} - - name: EUREKA_URL - value: {{- include "linkis.registration.url" . 
| quote | indent 1 }} - - name: EUREKA_PREFER_IP - value: "true" - - name: EUREKA_PORT - value: "{{ .Values.mgEureka.port }}" - - name: POD_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - {{- if .Values.psDataSourceManager.envs.extras }} -{{ toYaml .Values.psDataSourceManager.envs.extras | indent 12 }} - {{- end }} - {{- if .Values.psDataSourceManager.envs.froms }} - envFrom: -{{ toYaml .Values.psDataSourceManager.envs.froms | indent 12 }} - {{- end }} - volumeMounts: - - name: conf - mountPath: {{ .Values.linkis.locations.confDir }} - - name: log - mountPath: {{ .Values.linkis.locations.logDir }} - - name: runtime - mountPath: {{ .Values.linkis.locations.runtimeDir }} - resources: - {{- toYaml .Values.psDataSourceManager.resources | nindent 12 }} - {{- with .Values.psDataSourceManager.nodeSelector }} - nodeSelector: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.psDataSourceManager.affinity }} - affinity: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.psDataSourceManager.tolerations }} - tolerations: - {{- toYaml . | nindent 8 }} - {{- end }} diff --git a/linkis-dist/helm/charts/linkis/templates/linkis-ps-metadataquery.yaml b/linkis-dist/helm/charts/linkis/templates/linkis-ps-metadataquery.yaml deleted file mode 100644 index 4da8dd257da..00000000000 --- a/linkis-dist/helm/charts/linkis/templates/linkis-ps-metadataquery.yaml +++ /dev/null @@ -1,187 +0,0 @@ ---- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -{{- if .Values.linkis.featureGates.enableMetadataQuery }} -apiVersion: v1 -kind: Service -metadata: - name: {{ include "linkis.fullname" . }}-ps-metadataquery - labels: - app: {{ include "linkis.fullname" . }}-ps-metadataquery - {{- include "linkis.psMetadataQuery.labels" . | nindent 4 }} - annotations: - prometheus.io/path: {{ .Values.psMetadataQuery.prometheus.metricsPath }} - prometheus.io/port: '{{ .Values.psMetadataQuery.port }}' - prometheus.io/scrape: 'true' -spec: - ports: - - name: "http" - protocol: TCP - port: {{ .Values.psMetadataQuery.port }} - selector: - {{- include "linkis.psMetadataQuery.selectorLabels" . | nindent 4 }} - app: {{ include "linkis.fullname" . }}-ps-metadataquery - {{- toYaml .Values.svc | nindent 2 }} ---- -apiVersion: v1 -kind: Service -metadata: - name: {{ include "linkis.fullname" . }}-ps-metadataquery-headless - labels: - app: {{ include "linkis.fullname" . }}-ps-metadataquery - {{- include "linkis.psMetadataQuery.labels" . 
| nindent 4 }} - annotations: - prometheus.io/path: {{ .Values.psMetadataQuery.prometheus.metricsPath }} - prometheus.io/port: '{{ .Values.psMetadataQuery.port }}' - prometheus.io/scrape: 'true' -spec: - ports: - - name: "http" - protocol: TCP - port: {{ .Values.psMetadataQuery.port }} - selector: - {{- include "linkis.psMetadataQuery.selectorLabels" . | nindent 4 }} - app: {{ include "linkis.fullname" . }}-ps-metadataquery - clusterIP: None - type: ClusterIP - publishNotReadyAddresses: true ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ include "linkis.fullname" . }}-ps-metadataquery - labels: - app: {{ include "linkis.fullname" . }}-ps-metadataquery - version: {{ .Chart.AppVersion }} - {{- include "linkis.psMetadataQuery.labels" . | nindent 4 }} - {{- if .Values.psMetadataQuery.annotations }} - annotations: - {{- toYaml .Values.psMetadataQuery.annotations | nindent 4 }} - {{- end }} -spec: - replicas: {{ .Values.psMetadataQuery.replicas }} - selector: - matchLabels: - {{- include "linkis.psMetadataQuery.selectorLabels" . | nindent 6 }} - app: {{ include "linkis.fullname" . }}-ps-metadataquery - template: - metadata: - {{- with .Values.psMetadataQuery.annotations }} - annotations: - {{- toYaml . | nindent 8 }} - {{- end }} - labels: - app: {{ include "linkis.fullname" . }}-ps-metadataquery - version: {{ .Chart.AppVersion }} - {{- include "linkis.psMetadataQuery.selectorLabels" . | nindent 8 }} - spec: - subdomain: {{ include "linkis.fullname" . }}-ps-metadataquery-headless - {{- with .Values.imagePullSecrets }} - imagePullSecrets: - {{- toYaml . | nindent 8 }} - {{- end }} - volumes: - - name: conf - configMap: - name: {{ include "linkis.fullname" . }}-linkis-config - - name: log - emptyDir: {} - - name: runtime - {{- if .Values.linkis.featureGates.localMode }} - hostPath: - path: {{ .Values.linkis.locations.hostPath }} - type: DirectoryOrCreate - {{- else }} - emptyDir: {} - {{- end }} - serviceAccountName: {{ include "linkis.serviceAccountName" . }} - securityContext: - {{- toYaml .Values.psMetadataQuery.podSecurityContext | nindent 8 }} - containers: - - name: "metadataquery" - securityContext: - {{- toYaml .Values.psMetadataQuery.securityContext | nindent 12 }} - image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" - imagePullPolicy: {{ .Values.image.pullPolicy }} - command: - - /bin/bash - - -ecx - - >- - RUN_IN_FOREGROUND=true {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start ps-metadataquery - ports: - - name: "http" - containerPort: {{ .Values.psMetadataQuery.port }} - protocol: TCP - {{- if .Values.linkis.featureGates.enableJvmRemoteDebug }} - - name: "debug" - containerPort: 5005 - protocol: TCP - {{- end }} - # TODO: replace with httpGet when spring-boot readiness probe is implemented. - readinessProbe: - initialDelaySeconds: 15 - periodSeconds: 5 - timeoutSeconds: 20 - failureThreshold: 10 - tcpSocket: - port: {{ .Values.psMetadataQuery.port }} - env: - {{- if .Values.linkis.featureGates.enableJvmRemoteDebug }} - - name: DEBUG_PORT - value: "5005" - {{- end }} - - name: SERVER_HEAP_SIZE - value: {{ .Values.psMetadataQuery.jvmHeapSize }} - - name: EUREKA_URL - value: {{- include "linkis.registration.url" . 
| quote | indent 1 }} - - name: EUREKA_PREFER_IP - value: "true" - - name: EUREKA_PORT - value: "{{ .Values.mgEureka.port }}" - - name: POD_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - {{- if .Values.psMetadataQuery.envs.extras }} -{{ toYaml .Values.psMetadataQuery.envs.extras | indent 12 }} - {{- end }} - {{- if .Values.psMetadataQuery.envs.froms }} - envFrom: -{{ toYaml .Values.psMetadataQuery.envs.froms | indent 12 }} - {{- end }} - volumeMounts: - - name: conf - mountPath: {{ .Values.linkis.locations.confDir }} - - name: log - mountPath: {{ .Values.linkis.locations.logDir }} - - name: runtime - mountPath: {{ .Values.linkis.locations.runtimeDir }} - resources: - {{- toYaml .Values.psMetadataQuery.resources | nindent 12 }} - {{- with .Values.psMetadataQuery.nodeSelector }} - nodeSelector: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.psMetadataQuery.affinity }} - affinity: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.psMetadataQuery.tolerations }} - tolerations: - {{- toYaml . | nindent 8 }} - {{- end }} - -{{- end }} diff --git a/linkis-dist/helm/charts/linkis/templates/linkis-ps-publicservice.yaml b/linkis-dist/helm/charts/linkis/templates/linkis-ps-publicservice.yaml index a19457ab771..3b3e87c3e4d 100644 --- a/linkis-dist/helm/charts/linkis/templates/linkis-ps-publicservice.yaml +++ b/linkis-dist/helm/charts/linkis/templates/linkis-ps-publicservice.yaml @@ -98,6 +98,10 @@ spec: name: {{ include "linkis.fullname" . }}-linkis-config - name: log emptyDir: {} + - name: common + hostPath: + path: {{ .Values.linkis.locations.commonDir }} + type: DirectoryOrCreate - name: runtime {{- if .Values.linkis.featureGates.localMode }} hostPath: @@ -106,6 +110,18 @@ spec: {{- else }} emptyDir: {} {{- end }} + {{- if eq .Values.linkis.featureGates.localMode false }} + - name: hadoop-conf + projected: + sources: + - configMap: + name: {{ .Values.linkis.deps.hadoop.configMapName }} + - name: hive-conf + projected: + sources: + - configMap: + name: {{ .Values.linkis.deps.hive.configMapName }} + {{- end }} serviceAccountName: {{ include "linkis.serviceAccountName" . 
}} securityContext: {{- toYaml .Values.psPublicService.podSecurityContext | nindent 8 }} @@ -119,7 +135,7 @@ spec: - /bin/bash - -ecx - >- - RUN_IN_FOREGROUND=true {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start ps-publicservice + RUN_IN_FOREGROUND=true LINKIS_EXTENDED_LIB={{.Values.linkis.locations.commonDir}}/extendlib {{ .Values.linkis.locations.homeDir }}/sbin/linkis-daemon.sh start ps-publicservice ports: - name: "http" containerPort: {{ .Values.psPublicService.port }} @@ -142,6 +158,12 @@ spec: - name: DEBUG_PORT value: "5005" {{- end }} + {{- if eq .Values.linkis.featureGates.localMode false }} + - name: HADOOP_CONF_DIR + value: /etc/hadoop-conf + - name: HIVE_CONF_DIR + value: /etc/hive-conf + {{- end }} - name: SERVER_HEAP_SIZE value: {{ .Values.psPublicService.jvmHeapSize }} - name: EUREKA_URL @@ -162,12 +184,20 @@ spec: {{ toYaml .Values.psPublicService.envs.froms | indent 12 }} {{- end }} volumeMounts: + {{- if eq .Values.linkis.featureGates.localMode false }} + - name: hadoop-conf + mountPath: /etc/hadoop-conf + - name: hive-conf + mountPath: /etc/hive-conf + {{- end }} - name: conf mountPath: {{ .Values.linkis.locations.confDir }} - name: log mountPath: {{ .Values.linkis.locations.logDir }} - name: runtime mountPath: {{ .Values.linkis.locations.runtimeDir }} + - name: common + mountPath: {{ .Values.linkis.locations.commonDir }} resources: {{- toYaml .Values.psPublicService.resources | nindent 12 }} {{- with .Values.psPublicService.nodeSelector }} diff --git a/linkis-dist/helm/charts/linkis/values.yaml b/linkis-dist/helm/charts/linkis/values.yaml index d60784b0560..638f75134e3 100644 --- a/linkis-dist/helm/charts/linkis/values.yaml +++ b/linkis-dist/helm/charts/linkis/values.yaml @@ -36,6 +36,7 @@ image: # Overrides the image tag whose default is the chart appVersion. tag: "" imagePullSecrets: [] +withJdbcDriverJar: false serviceAccount: create: true annotations: {} @@ -85,6 +86,7 @@ linkis: confDir: /etc/linkis-conf logDir: /var/logs/linkis runtimeDir: /opt/linkis-runtime + commonDir: /opt/common # hostPath is a directory on the host machine. 
# In a KinD cluster, it is actually a directory in KinD's # node container, which is specific to the KinD cluster @@ -110,8 +112,9 @@ linkis: version: 2.7 hadoop: version: 2.7.2 + configMapName: hadoop-conf yarn: - restfulUrl: http://localhost:8080 + restfulUrl: http://ldh.ldh.svc.cluster.local:8088 authEnable: false authUser: hadoop authPassword: "123456" @@ -121,12 +124,20 @@ linkis: krb5: /etc/krb5.keytab spark: version: 2.4.3 + configMapName: spark-conf hive: version: 2.3.3 + configMapName: hive-conf meta: - url: "" # jdbc:mysql://localhost:3306/metastore?useUnicode=true - user: "" # root - password: "" # 123456 + url: "jdbc:mysql://mysql.mysql.svc.cluster.local:3306/hive_metadata?&createDatabaseIfNotExist=true&characterEncoding=UTF-8&useSSL=false" # jdbc:mysql://localhost:3306/metastore?useUnicode=true + user: "root" # root + password: "123456" # 123456 + zookeeper: + version: 3.5.9 + configMapName: zookeeper-conf + flink: + version: 1.12.2 + configMapName: flink-conf mgEureka: replicas: 1 @@ -276,54 +287,9 @@ psPublicService: extras: [] froms: [] -psCs: - replicas: 1 - port: 9108 - jvmHeapSize: "512M" - prometheus: - metricsPath: metrics - annotations: {} - nodeSelector: {} - tolerations: [] - affinity: {} - resources: {} - envs: - extras: [] - froms: [] - -psDataSourceManager: - replicas: 1 - port: 9109 - jvmHeapSize: "512M" - prometheus: - metricsPath: metrics - annotations: {} - nodeSelector: {} - tolerations: [] - affinity: {} - resources: {} - envs: - extras: [] - froms: [] - -psMetadataQuery: - replicas: 1 - port: 9110 - jvmHeapSize: "512M" - prometheus: - metricsPath: metrics - annotations: {} - nodeSelector: {} - tolerations: [] - affinity: {} - resources: {} - envs: - extras: [] - froms: [] - Web: replicas: 1 - port: 8087 + port: 8088 prometheus: metricsPath: metrics annotations: {} diff --git a/linkis-dist/helm/scripts/common.sh b/linkis-dist/helm/scripts/common.sh index 596b0dd8874..546cf2ebe44 100755 --- a/linkis-dist/helm/scripts/common.sh +++ b/linkis-dist/helm/scripts/common.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with diff --git a/linkis-dist/helm/scripts/create-kind-cluster.sh b/linkis-dist/helm/scripts/create-kind-cluster.sh index a804b6c4b60..1e010f3bb0b 100755 --- a/linkis-dist/helm/scripts/create-kind-cluster.sh +++ b/linkis-dist/helm/scripts/create-kind-cluster.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -27,11 +27,17 @@ set -e echo "# Creating KinD cluster ..."
# create data dir for KinD cluster KIND_CLUSTER_HOST_PATH=${TMP_DIR}/data + +# Shared storage of some common dependent packages such as mysql-connector-java-*.jar +# Values.linkis.locations.commonDir -> kind docker /opt/data/common -> vm ${KIND_COMMON_PATH} +KIND_COMMON_PATH=/opt/data/common/ + mkdir -p ${KIND_CLUSTER_HOST_PATH} # create kind cluster conf KIND_CLUSTER_CONF_TPL=${RESOURCE_DIR}/kind-cluster.yaml KIND_CLUSTER_CONF_FILE=${TMP_DIR}/kind-cluster.yaml -KIND_CLUSTER_HOST_PATH=${KIND_CLUSTER_HOST_PATH} envsubst < ${KIND_CLUSTER_CONF_TPL} > ${KIND_CLUSTER_CONF_FILE} +KIND_COMMON_PATH=${KIND_COMMON_PATH} KIND_CLUSTER_HOST_PATH=${KIND_CLUSTER_HOST_PATH} \ +envsubst < ${KIND_CLUSTER_CONF_TPL} > ${KIND_CLUSTER_CONF_FILE} echo "- kind cluster config: ${KIND_CLUSTER_CONF_FILE}" cat ${KIND_CLUSTER_CONF_FILE} diff --git a/linkis-dist/package/sbin/ext/linkis-cg-engineplugin b/linkis-dist/helm/scripts/install-charts-with-ldh.sh old mode 100644 new mode 100755 similarity index 64% rename from linkis-dist/package/sbin/ext/linkis-cg-engineplugin rename to linkis-dist/helm/scripts/install-charts-with-ldh.sh index 460bf1556cc..1ca8a20aace --- a/linkis-dist/package/sbin/ext/linkis-cg-engineplugin +++ b/linkis-dist/helm/scripts/install-charts-with-ldh.sh @@ -13,21 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# description: ecp start cmd # -# Modified for Linkis 1.0.0 +WORK_DIR=`cd $(dirname $0); pwd -P` -export SERVER_SUFFIX="linkis-computation-governance/linkis-cg-engineplugin" +. ${WORK_DIR}/common.sh -export SERVER_CLASS=org.apache.linkis.engineplugin.server.LinkisEngineConnPluginServer +KUBE_NAMESPACE=${1:-linkis} +HELM_RELEASE_NAME=${2:-linkis-demo} -#export DEBUG_PORT= +sh ${WORK_DIR}/install-linkis.sh ${KUBE_NAMESPACE} ${HELM_RELEASE_NAME} false -export COMMON_START_BIN=$LINKIS_HOME/sbin/ext/linkis-common-start -if [[ ! -f "${COMMON_START_BIN}" ]]; then - echo "The $COMMON_START_BIN does not exist!" - exit 1 -else - sh $COMMON_START_BIN -fi \ No newline at end of file +kubectl apply -n ${KUBE_NAMESPACE} -f ${RESOURCE_DIR}/ldh/configmaps diff --git a/linkis-dist/helm/scripts/install-charts.sh b/linkis-dist/helm/scripts/install-charts.sh index 9f25a433e58..38d00d1d0a3 100755 --- a/linkis-dist/helm/scripts/install-charts.sh +++ b/linkis-dist/helm/scripts/install-charts.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -22,25 +22,4 @@ WORK_DIR=`cd $(dirname $0); pwd -P` KUBE_NAMESPACE=${1:-linkis} HELM_RELEASE_NAME=${2:-linkis-demo} -. ${WORK_DIR}/common.sh - -if [ "X${HELM_DEBUG}" == "Xtrue" ]; then - # template helm charts - helm template --namespace ${KUBE_NAMESPACE} -f ${LINKIS_CHART_DIR}/values.yaml ${HELM_RELEASE_NAME} ${LINKIS_CHART_DIR} -else - # create hadoop configs - if [ "X${WITH_LDH}" == "Xtrue" ]; then - kubectl apply -n ${KUBE_NAMESPACE} -f ${RESOURCE_DIR}/ldh/configmaps - fi - # load image - if [ "X${KIND_LOAD_IMAGE}" == "Xtrue" ]; then - echo "# Loading Linkis image ..." 
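# As an aside, the envsubst templating used by create-kind-cluster.sh above
# can be exercised by hand; the paths in this sketch are illustrative only:
#
#   export KIND_CLUSTER_HOST_PATH=/tmp/linkis/data
#   export KIND_COMMON_PATH=/opt/data/common/
#   envsubst < scripts/resources/kind-cluster.yaml > /tmp/kind-cluster.yaml
#   kind create cluster --config /tmp/kind-cluster.yaml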
- kind load docker-image linkis:${LINKIS_IMAGE_TAG} --name ${KIND_CLUSTER_NAME} - kind load docker-image linkis-web:${LINKIS_IMAGE_TAG} --name ${KIND_CLUSTER_NAME} - fi - # install helm charts - helm install --create-namespace --namespace ${KUBE_NAMESPACE} \ - -f ${LINKIS_CHART_DIR}/values.yaml \ - --set image.tag=${LINKIS_IMAGE_TAG} \ - ${HELM_RELEASE_NAME} ${LINKIS_CHART_DIR} -fi +sh ${WORK_DIR}/install-linkis.sh ${KUBE_NAMESPACE} ${HELM_RELEASE_NAME} true diff --git a/linkis-dist/helm/scripts/install-ldh.sh b/linkis-dist/helm/scripts/install-ldh.sh index 9736b4c7af3..40ee930963a 100755 --- a/linkis-dist/helm/scripts/install-ldh.sh +++ b/linkis-dist/helm/scripts/install-ldh.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -32,7 +32,12 @@ fi # deploy LDH echo "# Deploying LDH ..." -kubectl create ns ldh +set +e +x=`kubectl get ns ldh 2> /dev/null` +set -e +if [ "X${x}" == "X" ]; then + kubectl create ns ldh +fi kubectl apply -n ldh -f ${RESOURCE_DIR}/ldh/configmaps LDH_VERSION=${LDH_VERSION} envsubst < ${RESOURCE_DIR}/ldh/ldh.yaml | kubectl apply -n ldh -f - diff --git a/linkis-dist/helm/scripts/install-linkis.sh b/linkis-dist/helm/scripts/install-linkis.sh new file mode 100755 index 00000000000..75abd25ac54 --- /dev/null +++ b/linkis-dist/helm/scripts/install-linkis.sh @@ -0,0 +1,48 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# + +WORK_DIR=`cd $(dirname $0); pwd -P` + +. ${WORK_DIR}/common.sh + +KUBE_NAMESPACE=${1:-linkis} +HELM_RELEASE_NAME=${2:-linkis-demo} +LOCAL_MODE=${3:-true} + +if [ "X${HELM_DEBUG}" == "Xtrue" ]; then + # template helm charts + helm template --namespace ${KUBE_NAMESPACE} -f ${LINKIS_CHART_DIR}/values.yaml ${HELM_RELEASE_NAME} ${LINKIS_CHART_DIR} +else + # create hadoop configs + if [ "X${WITH_LDH}" == "Xtrue" ]; then + kubectl apply -n ${KUBE_NAMESPACE} -f ${RESOURCE_DIR}/ldh/configmaps + fi + # load image + if [ "X${KIND_LOAD_IMAGE}" == "Xtrue" ]; then + echo "# Loading Linkis image ..." + kind load docker-image linkis:${LINKIS_IMAGE_TAG} --name ${KIND_CLUSTER_NAME} + kind load docker-image linkis-web:${LINKIS_IMAGE_TAG} --name ${KIND_CLUSTER_NAME} + fi + # install helm charts + echo "# Installing linkis, image tag=${LINKIS_IMAGE_TAG}, local mode=${LOCAL_MODE} ..."
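# For reference, the three positional arguments are namespace, helm release
# name and localMode, so a hypothetical non-local (LDH-backed) install would
# be invoked as:
#
#   sh scripts/install-linkis.sh linkis linkis-demo false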
+ helm install --create-namespace --namespace ${KUBE_NAMESPACE} \ + -f ${LINKIS_CHART_DIR}/values.yaml \ + --set image.tag=${LINKIS_IMAGE_TAG},linkis.featureGates.localMode=${LOCAL_MODE} \ + ${HELM_RELEASE_NAME} ${LINKIS_CHART_DIR} +fi diff --git a/linkis-dist/helm/scripts/install-mysql.sh b/linkis-dist/helm/scripts/install-mysql.sh index 208d09407ce..acb1be7250d 100755 --- a/linkis-dist/helm/scripts/install-mysql.sh +++ b/linkis-dist/helm/scripts/install-mysql.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with diff --git a/linkis-dist/helm/scripts/login-pod.sh b/linkis-dist/helm/scripts/login-pod.sh index 1bb391928d6..36032ee0cb4 100755 --- a/linkis-dist/helm/scripts/login-pod.sh +++ b/linkis-dist/helm/scripts/login-pod.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -17,6 +17,8 @@ WORK_DIR=`cd $(dirname $0); pwd -P` +. ${WORK_DIR}/common.sh + COMPONENT_NAME=$1 LINKIS_KUBE_NAMESPACE=linkis @@ -29,4 +31,33 @@ login() { kubectl exec -it -n ${LINKIS_KUBE_NAMESPACE} ${POD_NAME} -- bash } -login ${COMPONENT_NAME} +login_ldh() { + + echo "- login [ldh]'s bash ..." + POD_NAME=`kubectl get pods -n ldh -l app=ldh -o jsonpath='{.items[0].metadata.name}'` + kubectl exec -it -n ldh ${POD_NAME} -- bash + +} + +login_mysql() { + echo "- login [mysql]'s bash ..." + POD_NAME=`kubectl get pods -n mysql -l app=mysql -o jsonpath='{.items[0].metadata.name}'` + kubectl exec -it -n mysql ${POD_NAME} -- bash +} +login_kind(){ + echo "- login [kind]'s bash ..." + DOCKER_ID=`docker ps -aqf name=${KIND_CLUSTER_NAME}-control-plane` + docker exec -it ${DOCKER_ID} bash +} + +if [ "${COMPONENT_NAME}" == "ldh" ]; then + login_ldh ${COMPONENT_NAME} +elif [ "${COMPONENT_NAME}" == "mysql" ]; then + login_mysql ${COMPONENT_NAME} +elif [ "${COMPONENT_NAME}" == "kind" ]; then + login_kind ${COMPONENT_NAME} +else + login ${COMPONENT_NAME} +fi + + diff --git a/linkis-dist/helm/scripts/logs-pod.sh b/linkis-dist/helm/scripts/logs-pod.sh new file mode 100644 index 00000000000..eaed1381f14 --- /dev/null +++ b/linkis-dist/helm/scripts/logs-pod.sh @@ -0,0 +1,52 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
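# Usage sketch for this helper; the component name matches the helm instance
# label used below, and "mg-gateway" is only an example:
#
#   sh scripts/logs-pod.sh mg-gateway   # tail a Linkis component pod
#   sh scripts/logs-pod.sh ldh          # tail the LDH pod
#   sh scripts/logs-pod.sh mysql        # tail the MySQL pod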
+# +# + +WORK_DIR=`cd $(dirname $0); pwd -P` + +COMPONENT_NAME=$1 + +LINKIS_KUBE_NAMESPACE=linkis +LINKIS_INSTANCE_NAME=linkis-demo + +logs() { + component_name=$1 + POD_NAME=`kubectl get pods -n ${LINKIS_KUBE_NAMESPACE} -l app.kubernetes.io/instance=${LINKIS_INSTANCE_NAME}-${component_name} -o jsonpath='{.items[0].metadata.name}'` + kubectl logs -n ${LINKIS_KUBE_NAMESPACE} ${POD_NAME} -f +} + +logs_ldh() { + POD_NAME=`kubectl get pods -n ldh -l app=ldh -o jsonpath='{.items[0].metadata.name}'` + kubectl logs -n ldh ${POD_NAME} -f + +} + +logs_mysql() { + + POD_NAME=`kubectl get pods -n mysql -l app=mysql -o jsonpath='{.items[0].metadata.name}'` + kubectl logs -n mysql ${POD_NAME} -f +} + + +if [ "${COMPONENT_NAME}" == "ldh" ]; then + logs_ldh ${COMPONENT_NAME} +elif [ "${COMPONENT_NAME}" == "mysql" ]; then + logs_mysql ${COMPONENT_NAME} +else + logs ${COMPONENT_NAME} +fi + + diff --git a/linkis-dist/helm/scripts/prepare-for-spark.sh b/linkis-dist/helm/scripts/prepare-for-spark.sh new file mode 100644 index 00000000000..2bbd1123a38 --- /dev/null +++ b/linkis-dist/helm/scripts/prepare-for-spark.sh @@ -0,0 +1,51 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# +## Temporary workaround: copy the spark client so the spark-submit client stops failing, +## and let YARN resolve the callback ECM's domain name when executing spark-on-yarn tasks + +WORK_DIR=`cd $(dirname $0); pwd -P` + +## copy spark resource from ldh to linkis-cg-engineconnmanager + +LDH_POD_NAME=`kubectl get pods -n ldh -l app=ldh -o jsonpath='{.items[0].metadata.name}'` +kubectl cp -n ldh ${LDH_POD_NAME}:/opt/ldh/ ./ldh + +ECM_POD_NAME=`kubectl get pods -n linkis -l app.kubernetes.io/instance=linkis-demo-cg-engineconnmanager -o jsonpath='{.items[0].metadata.name}'` +kubectl cp ./ldh -n linkis ${ECM_POD_NAME}:/opt/ ; + + +kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "chmod +x /opt/ldh/1.3.0/spark-2.4.3-bin-hadoop2.7/bin/*" +kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "ln -s /opt/ldh/1.3.0/spark-2.4.3-bin-hadoop2.7 /opt/ldh/current/spark" +kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "ln -s /opt/ldh/1.3.0/hadoop-2.7.2 /opt/ldh/current/hadoop" +kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "ln -s /opt/ldh/1.3.0/apache-hive-2.3.3-bin /opt/ldh/current/hive" + + +kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "echo 'export SPARK_HOME=/opt/ldh/current/spark' |sudo tee --append /etc/profile" +kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "echo 'export PATH=\$SPARK_HOME/bin:\$PATH' |sudo tee --append /etc/profile" +kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "source /etc/profile" + # add ecm dns for ldh pod +ECM_POD_IP=`kubectl get pods -n linkis -l app.kubernetes.io/instance=linkis-demo-cg-engineconnmanager -o jsonpath='{.items[0].status.podIP}'` + +ECM_POD_SUBDOMAIN=`kubectl get pods -n linkis -l app.kubernetes.io/instance=linkis-demo-cg-engineconnmanager -o jsonpath='{.items[0].spec.subdomain}'` + +ECM_DNS="${ECM_POD_IP} ${ECM_POD_NAME}.${ECM_POD_SUBDOMAIN}.linkis.svc.cluster.local" + +kubectl exec -it -n ldh ${LDH_POD_NAME} -- bash -c "echo ${ECM_DNS} |sudo tee --append /etc/hosts" + + +rm -rf ldh; \ No newline at end of file diff --git a/linkis-dist/helm/scripts/remote-debug-proxy.sh b/linkis-dist/helm/scripts/remote-debug-proxy.sh index 7115e779cba..1fcc79efcb4 100755 --- a/linkis-dist/helm/scripts/remote-debug-proxy.sh +++ b/linkis-dist/helm/scripts/remote-debug-proxy.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with diff --git a/linkis-dist/helm/scripts/remote-proxy.sh b/linkis-dist/helm/scripts/remote-proxy.sh new file mode 100644 index 00000000000..5b5f9550723 --- /dev/null +++ b/linkis-dist/helm/scripts/remote-proxy.sh @@ -0,0 +1,129 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
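# Usage sketch for this proxy helper, based on the actions handled in the
# case statement below:
#
#   sh scripts/remote-proxy.sh start             # forward web/eureka/gateway ports
#   sh scripts/remote-proxy.sh start-with-debug  # also forward the JVM debug ports
#   sh scripts/remote-proxy.sh list              # show active port-forwards
#   sh scripts/remote-proxy.sh stop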
+# +# + WORK_DIR=`cd $(dirname $0); pwd -P` + +ACTION=$1 + +DEBUG=$2 + +LINKIS_KUBE_NAMESPACE=linkis +LINKIS_INSTANCE_NAME=linkis-demo + +LINKIS_PORT_MAP_WEB="8088:8088" +LINKIS_PORT_MAP_MG_EUREKA="20303:20303" + +LINKIS_PORT_MAP_MG_GATEWAY="9001:9001" + +#debug port + +LINKIS_DEBUG_PORT_MAP_MG_EUREKA="22101:5005" +LINKIS_DEBUG_PORT_MAP_MG_GATEWAY="22102:5005" + +LINKIS_DEBUG_PORT_MAP_PS_PUBLICSERVICE="22103:5005" + +LINKIS_DEBUG_PORT_MAP_CG_LINKISMANAGER="22104:5005" +LINKIS_DEBUG_PORT_MAP_CG_ENTRANCE="22105:5005" +LINKIS_DEBUG_PORT_MAP_CG_ENGINECONNMANAGER="22106:5005" +LINKIS_DEBUG_PORT_MAP_CG_ENGINEPLUGIN="22107:5005" + + +start_port_forward() { + component_name=$1 + port_map=$2 + echo "- starting port-forward for [${component_name}] with mapping [local->${port_map}->pod] ..." + POD_NAME=`kubectl get pods -n ${LINKIS_KUBE_NAMESPACE} -l app.kubernetes.io/instance=${LINKIS_INSTANCE_NAME}-${component_name} -o jsonpath='{.items[0].metadata.name}'` + kubectl port-forward -n ${LINKIS_KUBE_NAMESPACE} pod/${POD_NAME} ${port_map} --address='0.0.0.0' >/dev/null & +} + +stop_port_forward() { + component_name=$1 + port_map=$2 + echo "- stopping port-forward for [${component_name}] with mapping [local->${port_map}->pod] ..." + + pid=`ps aux |grep "port-forward" | grep " ${LINKIS_KUBE_NAMESPACE} " | grep "${component_name}" | grep "${port_map}" | awk -F ' ' '{print $2}'` + if [ "X$pid" != "X" ]; then + kill -9 $pid + fi +} + +start_port_forward_all() { + DEBUG=$1 + + start_port_forward web ${LINKIS_PORT_MAP_WEB} + start_port_forward mg-eureka ${LINKIS_PORT_MAP_MG_EUREKA} + start_port_forward mg-gateway ${LINKIS_PORT_MAP_MG_GATEWAY} + + if [ "${DEBUG}" == "true" ]; then + + start_port_forward mg-eureka ${LINKIS_DEBUG_PORT_MAP_MG_EUREKA} + start_port_forward mg-gateway ${LINKIS_DEBUG_PORT_MAP_MG_GATEWAY} + + start_port_forward ps-publicservice ${LINKIS_DEBUG_PORT_MAP_PS_PUBLICSERVICE} + + start_port_forward cg-linkismanager ${LINKIS_DEBUG_PORT_MAP_CG_LINKISMANAGER} + start_port_forward cg-entrance ${LINKIS_DEBUG_PORT_MAP_CG_ENTRANCE} + start_port_forward cg-engineconnmanager ${LINKIS_DEBUG_PORT_MAP_CG_ENGINECONNMANAGER} + start_port_forward cg-engineplugin ${LINKIS_DEBUG_PORT_MAP_CG_ENGINEPLUGIN} + fi +} + +stop_port_forward_all() { + DEBUG=$1 + + stop_port_forward web ${LINKIS_PORT_MAP_WEB} + stop_port_forward mg-eureka ${LINKIS_PORT_MAP_MG_EUREKA} + stop_port_forward mg-gateway ${LINKIS_PORT_MAP_MG_GATEWAY} + + if [ "${DEBUG}" == "true" ]; then + + stop_port_forward mg-eureka ${LINKIS_DEBUG_PORT_MAP_MG_EUREKA} + stop_port_forward mg-gateway ${LINKIS_DEBUG_PORT_MAP_MG_GATEWAY} + + stop_port_forward ps-publicservice ${LINKIS_DEBUG_PORT_MAP_PS_PUBLICSERVICE} + + stop_port_forward cg-linkismanager ${LINKIS_DEBUG_PORT_MAP_CG_LINKISMANAGER} + stop_port_forward cg-entrance ${LINKIS_DEBUG_PORT_MAP_CG_ENTRANCE} + stop_port_forward cg-engineconnmanager ${LINKIS_DEBUG_PORT_MAP_CG_ENGINECONNMANAGER} + stop_port_forward cg-engineplugin ${LINKIS_DEBUG_PORT_MAP_CG_ENGINEPLUGIN} + + fi + + +} + +case $ACTION in + "start") + start_port_forward_all + ;; + "stop") + stop_port_forward_all + ;; + "start-with-debug") + start_port_forward_all true + ;; + "stop-with-debug") + stop_port_forward_all true + ;; + "list") + ps aux |grep "port-forward" | grep " ${LINKIS_KUBE_NAMESPACE} " | grep "${LINKIS_INSTANCE_NAME}" + ;; + *) + echo "invalid arguments, only start,start-with-debug,stop,stop-with-debug,list are accepted" + exit -1 + ;; +esac diff --git a/linkis-dist/helm/scripts/resources/kind-cluster.yaml
b/linkis-dist/helm/scripts/resources/kind-cluster.yaml index 1c071fa16c1..338dd13dbe9 100644 --- a/linkis-dist/helm/scripts/resources/kind-cluster.yaml +++ b/linkis-dist/helm/scripts/resources/kind-cluster.yaml @@ -20,6 +20,8 @@ nodes: extraMounts: - hostPath: ${KIND_CLUSTER_HOST_PATH} containerPath: /data + - hostPath: ${KIND_COMMON_PATH} + containerPath: /opt/common # - role: worker # extraMounts: # - hostPath: ${KIND_CLUSTER_HOST_PATH} diff --git a/linkis-dist/helm/scripts/resources/ldh/configmaps/configmap-hadoop.yaml b/linkis-dist/helm/scripts/resources/ldh/configmaps/configmap-hadoop.yaml index 354e23ef356..fa74a304c9d 100644 --- a/linkis-dist/helm/scripts/resources/ldh/configmaps/configmap-hadoop.yaml +++ b/linkis-dist/helm/scripts/resources/ldh/configmaps/configmap-hadoop.yaml @@ -530,130 +530,65 @@ data: #export YARN_ROUTER_OPTS= yarn-site.xml: | - <configuration> - <property> - <name>yarn.nodemanager.aux-services</name> - <value>mapreduce_shuffle</value> - </property> - </configuration> - capacity-scheduler.xml: | - <configuration> - <property> - <name>yarn.scheduler.capacity.maximum-applications</name> - <value>10000</value> - <description> - Maximum number of applications that can be pending and running. - </description> - </property> - <property> - <name>yarn.scheduler.capacity.maximum-am-resource-percent</name> - <value>0.1</value> - <description> - Maximum percent of resources in the cluster which can be used to run - application masters i.e. controls number of concurrent running - applications. - </description> - </property> - <property> - <name>yarn.scheduler.capacity.resource-calculator</name> - <value>org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator</value> - <description> - The ResourceCalculator implementation to be used to compare - Resources in the scheduler. - The default i.e. DefaultResourceCalculator only uses Memory while - DominantResourceCalculator uses dominant-resource to compare - multi-dimensional resources such as Memory, CPU etc. - </description> - </property> - <property> - <name>yarn.scheduler.capacity.root.queues</name> - <value>default</value> - <description> - The queues at the this level (root is the root queue). - </description> - </property> - <property> - <name>yarn.scheduler.capacity.root.default.capacity</name> - <value>100</value> - <description>Default queue target capacity.</description> - </property> - <property> - <name>yarn.scheduler.capacity.root.default.user-limit-factor</name> - <value>1</value> - <description> - Default queue user limit a percentage from 0.0 to 1.0. - </description> - </property> - <property> - <name>yarn.scheduler.capacity.root.default.maximum-capacity</name> - <value>100</value> - <description> - The maximum capacity of the default queue. - </description> - </property> - <property> - <name>yarn.scheduler.capacity.root.default.state</name> - <value>RUNNING</value> - <description> - The state of the default queue. State can be one of RUNNING or STOPPED. - </description> - </property> - <property> - <name>yarn.scheduler.capacity.root.default.acl_submit_applications</name> - <value>*</value> - <description> - The ACL of who can submit jobs to the default queue. - </description> - </property> - <property> - <name>yarn.scheduler.capacity.root.default.acl_administer_queue</name> - <value>*</value> - <description> - The ACL of who can administer jobs on the default queue. - </description> - </property> - <property> - <name>yarn.scheduler.capacity.node-locality-delay</name> - <value>40</value> - <description> - Number of missed scheduling opportunities after which the CapacityScheduler - attempts to schedule rack-local containers. - Typically this should be set to number of nodes in the cluster, By default is setting - approximately number of nodes in one rack which is 40. - </description> - </property> - <property> - <name>yarn.scheduler.capacity.queue-mappings</name> - <value></value> - <description> - A list of mappings that will be used to assign jobs to queues - The syntax for this list is [u|g]:[name]:[queue_name][,next mapping]* - Typically this list will be used to map users to queues, - for example, u:%user:%user maps all users to queues with the same name - as the user. - </description> - </property> - <property> - <name>yarn.scheduler.capacity.queue-mappings-override.enable</name> - <value>false</value> - <description> - If a queue mapping is present, will it override the value specified - by the user? This can be used by administrators to place jobs in queues - that are different than the one specified by the user. - The default is false. - </description>
- </property> - </configuration> + <configuration> + <property> + <name>yarn.resourcemanager.scheduler.class</name> + <value>org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler</value> + <description>The class to use as the resource scheduler.</description> + </property> + <property> + <name>yarn.scheduler.fair.allocation.file</name> + <value>fair-scheduler.xml</value> + <description>Path to the allocation file with the fair scheduler queue configuration, resolved relative to the Hadoop conf directory.</description> + </property> + <property> + <name>yarn.nodemanager.resource.memory-mb</name> + <value>56080</value> + <description>Amount of physical memory, in MB, that can be allocated for containers.</description> + </property> + <property> + <name>yarn.nodemanager.resource.cpu-vcores</name> + <value>30</value> + <description>Number of CPU cores that can be allocated for containers.</description> + </property> + <property> + <name>yarn.nodemanager.vmem-check-enabled</name> + <value>false</value> + <description>Whether virtual memory limits will be enforced for containers</description> + </property> + <property> + <name>yarn.nodemanager.vmem-pmem-ratio</name> + <value>10.1</value> + <description>Ratio between virtual memory to physical memory when + setting memory limits for containers. Container allocations are + expressed in terms of physical memory, and virtual memory usage + is allowed to exceed this allocation by this ratio.</description> + </property> + <property> + <name>yarn.nodemanager.aux-services</name> + <value>mapreduce_shuffle</value> + <description>the valid service name should only contain a-zA-Z0-9_ and can not start with numbers</description> + </property> + <property> + <name>yarn.nodemanager.disk-health-checker.enable</name> + <value>false</value> + </property> + <property> + <name>yarn.resourcemanager.address</name> + <value>ldh.ldh.svc.cluster.local:8032</value> + </property> + </configuration> + fair-scheduler.xml: | + + 0.5 + + + 3 + 2 + fair + * + * + + + \ No newline at end of file diff --git a/linkis-dist/helm/scripts/resources/ldh/configmaps/configmap-spark.yaml b/linkis-dist/helm/scripts/resources/ldh/configmaps/configmap-spark.yaml index 5d5185bfd3c..5c95e795c5f 100644 --- a/linkis-dist/helm/scripts/resources/ldh/configmaps/configmap-spark.yaml +++ b/linkis-dist/helm/scripts/resources/ldh/configmaps/configmap-spark.yaml @@ -80,6 +80,40 @@ data: # - MKL_NUM_THREADS=1 Disable multi-threading of Intel MKL # - OPENBLAS_NUM_THREADS=1 Disable multi-threading of OpenBLAS + hive-site.xml: | + <configuration> + <property> + <name>javax.jdo.option.ConnectionURL</name> + <value>jdbc:mysql://mysql.mysql.svc.cluster.local:3306/hive_metadata?&amp;createDatabaseIfNotExist=true&amp;characterEncoding=UTF-8&amp;useSSL=false</value> + </property> + <property> + <name>javax.jdo.option.ConnectionUserName</name> + <value>root</value> + </property> + <property> + <name>javax.jdo.option.ConnectionPassword</name> + <value>123456</value> + </property> + <property> + <name>javax.jdo.option.ConnectionDriverName</name> + <value>com.mysql.jdbc.Driver</value> + </property> + <property> + <name>datanucleus.schema.autoCreateAll</name> + <value>true</value> + </property> + <property> + <name>hive.metastore.schema.verification</name> + <value>false</value> + </property> + <property> + <name>hive.metastore.uris</name> + <value>thrift://ldh.ldh.svc.cluster.local:9083</value> + </property> + </configuration> + spark-defaults.conf: | # Default system properties included when running spark-submit. # This is useful for setting default environmental settings.
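# One way to sanity-check the rendered scheduler and metastore settings after
# the LDH configmaps are applied; the "ldh" namespace matches the install
# scripts and the grep patterns are only illustrative:
#
#   kubectl get configmap hadoop-conf -n ldh -o yaml | grep -A 2 FairScheduler
#   kubectl get configmap spark-conf -n ldh -o yaml | grep metastore.uris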
@@ -172,37 +206,3 @@ data:
     appender.console.filter.1.regex = .*Thrift error occurred during processing of message.*
     appender.console.filter.1.onMatch = deny
     appender.console.filter.1.onMismatch = neutral
-
-  hive-site.xml: |
-    <configuration>
-      <property>
-        <name>javax.jdo.option.ConnectionURL</name>
-        <value>jdbc:mysql://mysql.mysql.svc.cluster.local:3306/hive_metadata?&createDatabaseIfNotExist=true&characterEncoding=UTF-8&useSSL=false</value>
-      </property>
-      <property>
-        <name>javax.jdo.option.ConnectionUserName</name>
-        <value>root</value>
-      </property>
-      <property>
-        <name>javax.jdo.option.ConnectionPassword</name>
-        <value>123456</value>
-      </property>
-      <property>
-        <name>javax.jdo.option.ConnectionDriverName</name>
-        <value>com.mysql.jdbc.Driver</value>
-      </property>
-      <property>
-        <name>datanucleus.schema.autoCreateAll</name>
-        <value>true</value>
-      </property>
-      <property>
-        <name>hive.metastore.schema.verification</name>
-        <value>false</value>
-      </property>
-      <property>
-        <name>hive.metastore.uris</name>
-        <value>thrift://ldh.ldh.svc.cluster.local:9083</value>
-      </property>
-    </configuration>
diff --git a/linkis-dist/helm/scripts/resources/ldh/ldh.yaml b/linkis-dist/helm/scripts/resources/ldh/ldh.yaml
index 6c2437e80c7..fec83e52170 100644
--- a/linkis-dist/helm/scripts/resources/ldh/ldh.yaml
+++ b/linkis-dist/helm/scripts/resources/ldh/ldh.yaml
@@ -82,77 +82,39 @@ spec:
         - name: data-dir
           emptyDir: {}
         - name: hadoop-conf
-          configMap:
-            name: hadoop-conf
-            items:
-              - key: hadoop-env.sh
-                path: hadoop-env.sh
-              - key: core-site.xml
-                path: core-site.xml
-              - key: hdfs-site.xml
-                path: hdfs-site.xml
-              - key: yarn-env.sh
-                path: yarn-env.sh
-              - key: yarn-site.xml
-                path: yarn-site.xml
-              - key: capacity-scheduler.xml
-                path: capacity-scheduler.xml
-              - key: log4j.properties
-                path: log4j.properties
+          projected:
+            sources:
+              - configMap:
+                  name: hadoop-conf
         - name: hive-conf
-          configMap:
-            name: hive-conf
-            items:
-              - key: hive-env.sh
-                path: hive-env.sh
-              - key: hive-site.xml
-                path: hive-site.xml
-              - key: hive-log4j2.properties
-                path: hive-log4j2.properties
-              - key: beeline-log4j2.properties
-                path: beeline-log4j2.properties
-              - key: hive-exec-log4j2.properties
-                path: hive-exec-log4j2.properties
+          projected:
+            sources:
+              - configMap:
+                  name: hive-conf
         - name: spark-conf
-          configMap:
-            name: spark-conf
-            items:
-              - key: spark-env.sh
-                path: spark-env.sh
-              - key: hive-site.xml
-                path: hive-site.xml
-              - key: spark-defaults.conf
-                path: spark-defaults.conf
-              - key: log4j2.properties
-                path: log4j2.properties
+          projected:
+            sources:
+              - configMap:
+                  name: spark-conf
+              - configMap:
+                  name: hive-conf
+                  items:
+                    - key: hive-site.xml
+                      path: hive-site.xml
         - name: flink-conf
-          configMap:
-            name: flink-conf
-            items:
-              - key: flink-conf.yaml
-                path: flink-conf.yaml
-              - key: log4j-cli.properties
-                path: log4j-cli.properties
-              - key: log4j-console.properties
-                path: log4j-console.properties
-              - key: log4j-session.properties
-                path: log4j-session.properties
-              - key: log4j.properties
-                path: log4j.properties
-              - key: logback-console.xml
-                path: logback-console.xml
-              - key: logback-session.xml
-                path: logback-session.xml
-              - key: logback.xml
-                path: logback.xml
+          projected:
+            sources:
+              - configMap:
+                  name: flink-conf
         - name: zookeeper-conf
-          configMap:
-            name: zookeeper-conf
-            items:
-              - key: zoo.cfg
-                path: zoo.cfg
-              - key: log4j.properties
-                path: log4j.properties
+          projected:
+            sources:
+              - configMap:
+                  name: zookeeper-conf
+        - name: mysql-jar
+          hostPath:
+            path: /opt/common/extendlib
+            type: DirectoryOrCreate
       containers:
         - name: ldh
           image: linkis-ldh:${LDH_VERSION}
@@ -214,3 +176,5 @@ spec:
               mountPath: /etc/ldh/flink
             - name: zookeeper-conf
               mountPath: /etc/ldh/zookeeper
+            - name: mysql-jar
+              mountPath: /opt/common/extendlib
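The new mysql-jar hostPath volume surfaces /opt/common/extendlib from the kind node inside the LDH container, so a JDBC driver dropped under the host's KIND_COMMON_PATH becomes visible to the pod without rebuilding any image. A sketch for staging the MySQL connector, which ASF releases do not bundle for license reasons; the 5.1.49 version is only an example:

```bash
#!/usr/bin/env bash
# Stage the MySQL JDBC driver so it appears at /opt/common/extendlib in the pod.
set -euo pipefail

# Assumption: KIND_COMMON_PATH is the same directory the kind cluster mounts
# at /opt/common (see the kind-cluster.yaml change earlier in this patch).
export KIND_COMMON_PATH=${KIND_COMMON_PATH:-$HOME/kind/common}
mkdir -p "$KIND_COMMON_PATH/extendlib"

curl -fSL -o "$KIND_COMMON_PATH/extendlib/mysql-connector-java-5.1.49.jar" \
  https://repo1.maven.org/maven2/mysql/mysql-connector-java/5.1.49/mysql-connector-java-5.1.49.jar
```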
diff --git a/linkis-dist/package/bin/linkis-cli b/linkis-dist/package/bin/linkis-cli
index 2c15cf67434..dfc9026e512 100644
--- a/linkis-dist/package/bin/linkis-cli
+++ b/linkis-dist/package/bin/linkis-cli
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with
diff --git a/linkis-dist/package/bin/linkis-cli-hive b/linkis-dist/package/bin/linkis-cli-hive
index 9bfc4169820..31ef0c54f0e 100644
--- a/linkis-dist/package/bin/linkis-cli-hive
+++ b/linkis-dist/package/bin/linkis-cli-hive
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with
diff --git a/linkis-dist/package/bin/linkis-cli-pre b/linkis-dist/package/bin/linkis-cli-pre
index 4273232245b..868bdbebe6e 100644
--- a/linkis-dist/package/bin/linkis-cli-pre
+++ b/linkis-dist/package/bin/linkis-cli-pre
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with
diff --git a/linkis-dist/package/bin/linkis-cli-spark-sql b/linkis-dist/package/bin/linkis-cli-spark-sql
index 4694dc463bc..8a8b58ea706 100644
--- a/linkis-dist/package/bin/linkis-cli-spark-sql
+++ b/linkis-dist/package/bin/linkis-cli-spark-sql
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with
diff --git a/linkis-dist/package/bin/linkis-cli-spark-submit b/linkis-dist/package/bin/linkis-cli-spark-submit
index 58318905795..2ae23046686 100644
--- a/linkis-dist/package/bin/linkis-cli-spark-submit
+++ b/linkis-dist/package/bin/linkis-cli-spark-submit
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with
diff --git a/linkis-dist/package/bin/linkis-cli-sqoop b/linkis-dist/package/bin/linkis-cli-sqoop
index 27fca1aec72..9d516996cae 100644
--- a/linkis-dist/package/bin/linkis-cli-sqoop
+++ b/linkis-dist/package/bin/linkis-cli-sqoop
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with
diff --git a/linkis-dist/package/conf/linkis-cg-entrance.properties b/linkis-dist/package/conf/linkis-cg-entrance.properties
index 6ef5b538d48..f2d136f6e8c 100644
--- a/linkis-dist/package/conf/linkis-cg-entrance.properties
+++ b/linkis-dist/package/conf/linkis-cg-entrance.properties
@@ -24,5 +24,10 @@ wds.linkis.server.user.restful.uri.pass.auth=/actuator/prometheus,/api/rest_j/v1
 ## enable entrance label registration
 #spring.eureka.instance.metadata-map.route=et1
+
+## enable variable operation, default value is false
+# wds.linkis.variable.operation=true
+
+
 ##Spring
 spring.server.port=9104
\ No newline at end of file
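The new wds.linkis.variable.operation switch ships commented out. To try it on a deployed instance, append the property and bounce only the entrance service; a sketch assuming the conventional LinkisInstall layout and that linkis-daemon.sh accepts a `restart <service>` subcommand (neither is spelled out in this patch):

```bash
#!/usr/bin/env bash
# Enable variable operation for cg-entrance and restart just that one service.
set -euo pipefail

# Assumption: standard install location; adjust LINKIS_HOME to your deployment.
LINKIS_HOME=${LINKIS_HOME:-/appcom/Install/LinkisInstall}
echo "wds.linkis.variable.operation=true" >> "$LINKIS_HOME/conf/linkis-cg-entrance.properties"
sh "$LINKIS_HOME/sbin/linkis-daemon.sh" restart cg-entrance
```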
diff --git a/linkis-dist/package/conf/linkis-cg-linkismanager.properties b/linkis-dist/package/conf/linkis-cg-linkismanager.properties
index f70442970da..5a8522b13f2 100644
--- a/linkis-dist/package/conf/linkis-cg-linkismanager.properties
+++ b/linkis-dist/package/conf/linkis-cg-linkismanager.properties
@@ -12,11 +12,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
+# wds.linkis.test.mode=true
+# wds.linkis.engineconn.debug.enable=true
 #
 ##mybatis
-wds.linkis.server.mybatis.mapperLocations=classpath:org/apache/linkis/manager/dao/impl/*.xml,org/apache/linkis/manager/rm/external/dao/impl/ExternalResourceProviderDaoImpl.xml
+wds.linkis.server.mybatis.mapperLocations=classpath:org/apache/linkis/manager/dao/impl/*.xml,org/apache/linkis/manager/rm/external/dao/impl/ExternalResourceProviderDaoImpl.xml,classpath:org/apache/linkis/engineplugin/server/dao/impl/*.xml
 wds.linkis.server.mybatis.typeAliasesPackage=
-wds.linkis.server.mybatis.BasePackage=org.apache.linkis.manager.dao,org.apache.linkis.manager.rm.external.dao
+wds.linkis.server.mybatis.BasePackage=org.apache.linkis.manager.dao,org.apache.linkis.manager.rm.external.dao,org.apache.linkis.engineplugin.server.dao
+
+wds.linkis.engineConn.plugin.cache.expire-in-seconds=100000
+wds.linkis.engineConn.dist.load.enable=true
+#wds.linkis.engineconn.home=/appcom/Install/LinkisInstall/lib/linkis-engineconn-plugins
+#wds.linkis.engineconn.plugin.loader.store.path=/appcom/Install/LinkisInstall/lib/linkis-engineconn-plugins
+
+wds.linkis.rpc.conf.enable.local.message=true
+wds.linkis.rpc.conf.local.app.list=linkis-cg-linkismanager
+
 ##Spring
 spring.server.port=9101
\ No newline at end of file
diff --git a/linkis-dist/package/conf/linkis-ps-publicservice.properties b/linkis-dist/package/conf/linkis-ps-publicservice.properties
index 4645b2b17db..d644ebbe8b6 100644
--- a/linkis-dist/package/conf/linkis-ps-publicservice.properties
+++ b/linkis-dist/package/conf/linkis-ps-publicservice.properties
@@ -34,7 +34,7 @@ hive.meta.password=
 wds.linkis.metadata.hive.encode.enabled=false
 
 # associated with the logged-in user when querying metadata:default value is true
-#linkis.metadata.hive.permission.with-login-user-enabled
+linkis.metadata.hive.permission.with-login-user-enabled=false
 #wds.linkis.jobhistory.undone.job.minimum.id=0
 #wds.linkis.jobhistory.undone.job.refreshtime.daily=00:15
 wds.linkis.cs.deserialize.replace_package_header.enable=false
diff --git a/linkis-dist/package/db/linkis_ddl.sql b/linkis-dist/package/db/linkis_ddl.sql
index 36c7759c101..e87a846457a 100644
--- a/linkis-dist/package/db/linkis_ddl.sql
+++ b/linkis-dist/package/db/linkis_ddl.sql
@@ -290,6 +290,7 @@ CREATE TABLE `linkis_ps_datasource_field` (
   `is_partition_field` tinyint(1) NOT NULL,
   `is_primary` tinyint(1) NOT NULL,
   `length` int(11) DEFAULT NULL,
+  `mode_info` varchar(128) COLLATE utf8_bin DEFAULT NULL,
   PRIMARY KEY (`id`)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
 
diff --git a/linkis-dist/package/db/upgrade/1.3.0_schema/mysql/linkis_dml.sql b/linkis-dist/package/db/upgrade/1.3.0_schema/mysql/linkis_dml.sql
new file mode 100644
index 00000000000..9e35cac36ef
--- /dev/null
+++ b/linkis-dist/package/db/upgrade/1.3.0_schema/mysql/linkis_dml.sql
@@ -0,0 +1,18 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43015','当前节点需要的CS表解析失败,请检查当前CSID对应的CS表是否存在','Cannot parse cs table for node',0); \ No newline at end of file diff --git a/linkis-dist/package/sbin/common.sh b/linkis-dist/package/sbin/common.sh index 4278507f9d8..268bdfe281e 100644 --- a/linkis-dist/package/sbin/common.sh +++ b/linkis-dist/package/sbin/common.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with diff --git a/linkis-dist/package/sbin/ext/linkis-cg-engineconnmanager b/linkis-dist/package/sbin/ext/linkis-cg-engineconnmanager index 025190f105f..403d76c3592 100644 --- a/linkis-dist/package/sbin/ext/linkis-cg-engineconnmanager +++ b/linkis-dist/package/sbin/ext/linkis-cg-engineconnmanager @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with diff --git a/linkis-dist/package/sbin/ext/linkis-cg-entrance b/linkis-dist/package/sbin/ext/linkis-cg-entrance index 43281281c30..3dd96ded60c 100644 --- a/linkis-dist/package/sbin/ext/linkis-cg-entrance +++ b/linkis-dist/package/sbin/ext/linkis-cg-entrance @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with diff --git a/linkis-dist/package/sbin/ext/linkis-cg-linkismanager b/linkis-dist/package/sbin/ext/linkis-cg-linkismanager index 1418ba6c356..40a9b17c08c 100644 --- a/linkis-dist/package/sbin/ext/linkis-cg-linkismanager +++ b/linkis-dist/package/sbin/ext/linkis-cg-linkismanager @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with diff --git a/linkis-dist/package/sbin/ext/linkis-common-start b/linkis-dist/package/sbin/ext/linkis-common-start index c93ce958c74..c312e58cd91 100644 --- a/linkis-dist/package/sbin/ext/linkis-common-start +++ b/linkis-dist/package/sbin/ext/linkis-common-start @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -69,8 +69,16 @@ if [ ! 
-r "$SERVER_LIB" ] ; then exit 1 fi +##The extended lib such mysql-connector-java-*.jar +export LINKIS_EXTENDED_LIB=$LINKIS_EXTENDED_LIB +if [ -r "$LINKIS_EXTENDED_LIB" ] ; then + echo "add extended lib $LINKIS_EXTENDED_LIB" +fi + + + ## set class path -export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$LINKIS_COMMONS_LIB/*:$SERVER_LIB/* +export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$LINKIS_COMMONS_LIB/*:$SERVER_LIB/*:$LINKIS_EXTENDED_LIB/* echo "=====Java Start Command=====" diff --git a/linkis-dist/package/sbin/ext/linkis-mg-eureka b/linkis-dist/package/sbin/ext/linkis-mg-eureka index f89a5bc04fd..6beb502d599 100644 --- a/linkis-dist/package/sbin/ext/linkis-mg-eureka +++ b/linkis-dist/package/sbin/ext/linkis-mg-eureka @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with diff --git a/linkis-dist/package/sbin/ext/linkis-mg-gateway b/linkis-dist/package/sbin/ext/linkis-mg-gateway index bc51387bdd2..d918e9aed06 100644 --- a/linkis-dist/package/sbin/ext/linkis-mg-gateway +++ b/linkis-dist/package/sbin/ext/linkis-mg-gateway @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -72,8 +72,14 @@ if [ ! -r "$SERVER_LIB" ] ; then exit 1 fi +##The extended lib such mysql-connector-java-*.jar +export LINKIS_EXTENDED_LIB=$LINKIS_EXTENDED_LIB +if [ -r "$LINKIS_EXTENDED_LIB" ] ; then + echo "add extended lib $LINKIS_EXTENDED_LIB" +fi + ## set class path -export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$SERVER_LIB/* +export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$SERVER_LIB/*:$LINKIS_EXTENDED_LIB/* echo "=====Java Start Command=====" diff --git a/linkis-dist/package/sbin/ext/linkis-ps-publicservice b/linkis-dist/package/sbin/ext/linkis-ps-publicservice index eb75f9d2ec7..6ab96522645 100644 --- a/linkis-dist/package/sbin/ext/linkis-ps-publicservice +++ b/linkis-dist/package/sbin/ext/linkis-ps-publicservice @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with diff --git a/linkis-dist/package/sbin/graceful-upgrade.sh b/linkis-dist/package/sbin/graceful-upgrade.sh index 233d86fb5fe..bbd41b1f79e 100644 --- a/linkis-dist/package/sbin/graceful-upgrade.sh +++ b/linkis-dist/package/sbin/graceful-upgrade.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. 
diff --git a/linkis-dist/package/sbin/graceful-upgrade.sh b/linkis-dist/package/sbin/graceful-upgrade.sh
index 233d86fb5fe..bbd41b1f79e 100644
--- a/linkis-dist/package/sbin/graceful-upgrade.sh
+++ b/linkis-dist/package/sbin/graceful-upgrade.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with
@@ -45,9 +45,6 @@ function getPort(){
     "cg-linkismanager")
       export SERVER_PORT=$MANAGER_PORT
       ;;
-    "ps-cs")
-      export SERVER_PORT=$CS_PORT
-      ;;
     "cg-engineconnmanager")
       export SERVER_PORT=$ENGINECONNMANAGER_PORT
       ;;
@@ -57,12 +54,6 @@ function getPort(){
     "cg-engineplugin")
       export SERVER_PORT=$ENGINECONN_PLUGIN_SERVER_PORT
       ;;
-    "ps-data-source-manager")
-      export SERVER_PORT=$DATASOURCE_MANAGER_PORT
-      ;;
-    "ps-metadataquery")
-      export SERVER_PORT=$METADATA_QUERY_PORT
-      ;;
   esac
 }
diff --git a/linkis-dist/package/sbin/kill-process-by-pid.sh b/linkis-dist/package/sbin/kill-process-by-pid.sh
index aff0efea47d..eea93d03bb4 100644
--- a/linkis-dist/package/sbin/kill-process-by-pid.sh
+++ b/linkis-dist/package/sbin/kill-process-by-pid.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with
diff --git a/linkis-dist/package/sbin/linkis-daemon.sh b/linkis-dist/package/sbin/linkis-daemon.sh
index 91a328796ee..4bb96aace97 100755
--- a/linkis-dist/package/sbin/linkis-daemon.sh
+++ b/linkis-dist/package/sbin/linkis-daemon.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with
diff --git a/linkis-dist/package/sbin/linkis-start-all.sh b/linkis-dist/package/sbin/linkis-start-all.sh
index 7f83581f127..e5c58667ffd 100644
--- a/linkis-dist/package/sbin/linkis-start-all.sh
+++ b/linkis-dist/package/sbin/linkis-start-all.sh
@@ -82,7 +82,7 @@ SERVER_NAME="cg-linkismanager"
 SERVER_IP=$MANAGER_INSTALL_IP
 startApp
 
-sleep 15
+sleep 20
 
 #linkis-cg-entrance
 SERVER_NAME="cg-entrance"
@@ -94,11 +94,6 @@ SERVER_NAME="cg-engineconnmanager"
 SERVER_IP=$ENGINECONNMANAGER_INSTALL_IP
 startApp
 
-#linkis-cg-engineplugin(ecp)
-SERVER_NAME="cg-engineplugin"
-SERVER_IP=$ENGINECONN_PLUGIN_SERVER_INSTALL_IP
-startApp
-
 echo "start-all shell script executed completely"
 
 echo "Start to check all linkis microservice"
@@ -161,9 +156,4 @@ SERVER_NAME="cg-engineconnmanager"
 SERVER_IP=$ENGINECONNMANAGER_INSTALL_IP
 checkServer
 
-#linkis-cg-engineplugin(ecp)
-SERVER_NAME="cg-engineplugin"
-SERVER_IP=$ENGINECONN_PLUGIN_SERVER_INSTALL_IP
-checkServer
-
-echo "Linkis started successfully"
+echo "Apache Linkis started successfully"
diff --git a/linkis-dist/package/sbin/linkis-stop-all.sh b/linkis-dist/package/sbin/linkis-stop-all.sh
index baa93dd85f8..0fe5a7a72be 100644
--- a/linkis-dist/package/sbin/linkis-stop-all.sh
+++ b/linkis-dist/package/sbin/linkis-stop-all.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.
See the NOTICE file distributed with @@ -69,11 +69,6 @@ SERVER_NAME="cg-entrance" SERVER_IP=$ENTRANCE_INSTALL_IP stopApp -#linkis-cg-engineplugin(ecp) -SERVER_NAME="cg-engineplugin" -SERVER_IP=$ENGINECONN_PLUGIN_SERVER_INSTALL_IP -stopApp - #linkis-ps-publicservice SERVER_NAME="ps-publicservice" SERVER_IP=$PUBLICSERVICE_INSTALL_IP diff --git a/linkis-dist/pom.xml b/linkis-dist/pom.xml index 63e19c9f3ca..b1c3bd52fcd 100644 --- a/linkis-dist/pom.xml +++ b/linkis-dist/pom.xml @@ -202,7 +202,6 @@ docker - dev centos:7 nginx:1.19.6 1.8.0-openjdk @@ -234,41 +233,41 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - + + + @@ -281,31 +280,31 @@ install - - + + - + - - - - - - - - - - - - - - + + + + + + + + + + + + + + - - - + + + @@ -318,64 +317,64 @@ install - + - - - - - - - + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - + + + diff --git a/linkis-dist/release-docs/LICENSE b/linkis-dist/release-docs/LICENSE index 5db232030c4..fff8c559ba9 100644 --- a/linkis-dist/release-docs/LICENSE +++ b/linkis-dist/release-docs/LICENSE @@ -241,7 +241,9 @@ See licenses/ for text of these licenses. (Apache License, Version 2.0) rest (org.elasticsearch.client:elasticsearch-rest-client:7.6.2 - https://github.com/elastic/elasticsearch) (Apache License, Version 2.0) sniffer (org.elasticsearch.client:elasticsearch-rest-client-sniffer:7.6.2 - https://github.com/elastic/elasticsearch) (Apache License, Version 2.0) (GNU Lesser General Public License (LGPL), Version 2.1) JAX-RS provider for JSON content type (org.codehaus.jackson:jackson-jaxrs:1.9.13 - http://jackson.codehaus.org) + (Apache License, Version 2.0) (GNU Lesser General Public License (LGPL), Version 2.1) JAX-RS provider for JSON content type (org.codehaus.jackson:jackson-jaxrs:1.9.2 - http://jackson.codehaus.org) (Apache License, Version 2.0) (GNU Lesser General Public License (LGPL), Version 2.1) Xml Compatibility extensions for Jackson (org.codehaus.jackson:jackson-xc:1.9.13 - http://jackson.codehaus.org) + (Apache License, Version 2.0) (GNU Lesser General Public License (LGPL), Version 2.1) Xml Compatibility extensions for Jackson (org.codehaus.jackson:jackson-xc:1.9.2 - http://jackson.codehaus.org) (Apache License, Version 2.0) (GNU Library or Lesser General Public License (LGPL) V2.1) JSQLParser library (com.github.jsqlparser:jsqlparser:1.0 - https://github.com/JSQLParser/JSqlParser) (Apache License, Version 2.0) (LGPL 2.1) (MPL 1.1) Javassist (org.javassist:javassist:3.19.0-GA - http://www.javassist.org/) (Apache License, Version 2.0) (The SAX License) (The W3C License) XML Commons External Components XML APIs (xml-apis:xml-apis:1.4.01 - http://xml.apache.org/commons/components/external/) @@ -286,7 +288,9 @@ See licenses/ for text of these licenses. 
(Apache License, Version 2.0) Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-jul/) (Apache License, Version 2.0) Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/) (Apache License, Version 2.0) Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.6.2 - http://logging.apache.org/log4j/2.x/log4j-web/) - (Apache License, Version 2.0) Apache POI (org.apache.poi:poi-ooxml-lite:5.2.2 - http://poi.apache.org/) + (Apache License, Version 2.0) Apache POI (org.apache.poi:poi:5.2.3 - http://poi.apache.org/) + (Apache License, Version 2.0) Apache POI (org.apache.poi:poi-ooxml:5.2.3 - http://poi.apache.org/) + (Apache License, Version 2.0) Apache POI (org.apache.poi:poi-ooxml-lite:5.2.3 - http://poi.apache.org/) (Apache License, Version 2.0) Apache Velocity (org.apache.velocity:velocity:1.5 - http://velocity.apache.org/engine/releases/velocity-1.5/) (Apache License, Version 2.0) ApacheDS I18n (org.apache.directory.server:apacheds-i18n:2.0.0-M15 - http://directory.apache.org/apacheds/1.5/apacheds-i18n) (Apache License, Version 2.0) ApacheDS Protocol Kerberos Codec (org.apache.directory.server:apacheds-kerberos-codec:2.0.0-M15 - http://directory.apache.org/apacheds/1.5/apacheds-kerberos-codec) @@ -309,6 +313,7 @@ See licenses/ for text of these licenses. (Apache License, Version 2.0) Curator Framework (org.apache.curator:curator-framework:2.6.0 - http://curator.apache.org/curator-framework) (Apache License, Version 2.0) Curator Recipes (org.apache.curator:curator-recipes:2.6.0 - http://curator.apache.org/curator-recipes) (Apache License, Version 2.0) Data Mapper for Jackson (org.codehaus.jackson:jackson-mapper-asl:1.9.13 - http://jackson.codehaus.org) + (Apache License, Version 2.0) Data Mapper for Jackson (org.codehaus.jackson:jackson-mapper-asl:1.9.2 - http://jackson.codehaus.org) (Apache License, Version 2.0) DataNucleus Core (org.datanucleus:datanucleus-core:3.2.10 - http://www.datanucleus.org) (Apache License, Version 2.0) Digester (commons-digester:commons-digester:1.8 - http://jakarta.apache.org/commons/digester/) (Apache License, Version 2.0) Evictor (com.stoyanr:evictor:1.0.0 - https://github.com/stoyanr/Evictor) @@ -338,19 +343,19 @@ See licenses/ for text of these licenses. 
(Apache License, Version 2.0) JMES Path Query library (com.amazonaws:jmespath-java:1.11.277 - https://aws.amazon.com/sdkforjava) (Apache License, Version 2.0) JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:3.1.0 - http://metrics.codahale.com/metrics-jvm/) (Apache License, Version 2.0) Jackson (org.codehaus.jackson:jackson-core-asl:1.9.13 - http://jackson.codehaus.org) + (Apache License, Version 2.0) Jackson (org.codehaus.jackson:jackson-core-asl:1.9.2 - http://jackson.codehaus.org) (Apache License, Version 2.0) Jackson Integration for Metrics (io.dropwizard.metrics:metrics-json:3.1.0 - http://metrics.codahale.com/metrics-json/) - (Apache License, Version 2.0) Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.10.0 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) - (Apache License, Version 2.0) Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.10.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) - (Apache License, Version 2.0) Jackson module: JAXB Annotations (com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.10.1 - https://github.com/FasterXML/jackson-modules-base) - (Apache License, Version 2.0) Jackson module: Paranamer (com.fasterxml.jackson.module:jackson-module-paranamer:2.11.3 - https://github.com/FasterXML/jackson-modules-base) - (Apache License, Version 2.0) Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.10.0 - http://github.com/FasterXML/jackson) - (Apache License, Version 2.0) Jackson-dataformat-CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.6.7 - http://wiki.fasterxml.com/JacksonForCbor) - (Apache License, Version 2.0) Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.13.2 - http://github.com/FasterXML/jackson-dataformats-binary) - (Apache License, Version 2.0) Jackson-dataformat-CSV (com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.13.2 - https://github.com/FasterXML/jackson-dataformats-text) - (Apache License, Version 2.0) Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.13.2 - http://github.com/FasterXML/jackson-dataformats-binary) - (Apache License, Version 2.0) Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.13.2 - https://github.com/FasterXML/jackson-dataformats-text) - (Apache License, Version 2.0) Jackson-dataformat-XML (com.fasterxml.jackson.dataformat:jackson-dataformat-xml:2.10.1 - https://github.com/FasterXML/jackson-dataformat-xml) - (Apache License, Version 2.0) Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.10.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) + (Apache License, Version 2.0) Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.4 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) + (Apache License, Version 2.0) Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.4 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) + (Apache License, Version 2.0) Jackson module: JAXB Annotations (com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.13.4 - https://github.com/FasterXML/jackson-modules-base) + (Apache License, Version 2.0) Jackson module: Paranamer (com.fasterxml.jackson.module:jackson-module-paranamer:2.13.4 - 
https://github.com/FasterXML/jackson-modules-base) + (Apache License, Version 2.0) Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.13.4 - http://github.com/FasterXML/jackson) + (Apache License, Version 2.0) Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.13.4 - http://github.com/FasterXML/jackson-dataformats-binary) + (Apache License, Version 2.0) Jackson-dataformat-CSV (com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.13.4 - https://github.com/FasterXML/jackson-dataformats-text) + (Apache License, Version 2.0) Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.13.4 - http://github.com/FasterXML/jackson-dataformats-binary) + (Apache License, Version 2.0) Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.13.4 - https://github.com/FasterXML/jackson-dataformats-text) + (Apache License, Version 2.0) Jackson-dataformat-XML (com.fasterxml.jackson.dataformat:jackson-dataformat-xml:2.13.4 - https://github.com/FasterXML/jackson-dataformat-xml) + (Apache License, Version 2.0) Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.4 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) (Apache License, Version 2.0) Jettison (org.codehaus.jettison:jettison:1.3.7 - http://codehaus.org/jettison/) (Apache License, Version 2.0) Joda-Time (joda-time:joda-time:2.3 - http://www.joda.org/joda-time/) (Apache License, Version 2.0) Netty/All-in-One (io.netty:netty-all:4.0.23.Final - http://netty.io/netty-all/) @@ -414,7 +419,7 @@ See licenses/ for text of these licenses. (Apache License, Version 2.0) Spring Web MVC (org.springframework:spring-webmvc:5.2.2.RELEASE - https://github.com/spring-projects/spring-framework) (Apache License, Version 2.0) Spring WebFlux (org.springframework:spring-webflux:5.2.2.RELEASE - https://github.com/spring-projects/spring-framework) (Apache License, Version 2.0) StAX API (stax:stax-api:1.0.1 - http://stax.codehaus.org/) - (Apache License, Version 2.0) Streaming Excel reader (com.github.pjfanning:excel-streaming-reader:4.0.1 - https://github.com/pjfanning/excel-streaming-reader) + (Apache License, Version 2.0) Streaming Excel reader (com.github.pjfanning:excel-streaming-reader:4.0.2 - https://github.com/pjfanning/excel-streaming-reader) (Apache License, Version 2.0) Woodstox (com.fasterxml.woodstox:woodstox-core:5.2.1 - https://github.com/FasterXML/woodstox) (Apache License, Version 2.0) XML Commons Resolver Component (xml-resolver:xml-resolver:1.2 - http://xml.apache.org/commons/components/resolver/) (Apache License, Version 2.0) Xerces2 Java (com.rackspace.apache:xerces2-xsd11:2.11.1 - http://xerces.apache.org/xerces2-j/) @@ -437,8 +442,8 @@ See licenses/ for text of these licenses. 
(Apache License, Version 2.0) hadoop-yarn-server-common (org.apache.hadoop:hadoop-yarn-server-common:2.6.5 - https://hadoop.apache.org/) (Apache License, Version 2.0) htrace-core (org.apache.htrace:htrace-core:3.1.0-incubating - http://incubator.apache.org/projects/htrace.html) (Apache License, Version 2.0) hystrix-core (com.netflix.hystrix:hystrix-core:1.4.3 - https://github.com/Netflix/Hystrix) - (Apache License, Version 2.0) jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.10.0 - http://github.com/FasterXML/jackson) - (Apache License, Version 2.0) jackson-module-scala (com.fasterxml.jackson.module:jackson-module-scala_2.11:2.11.3 - http://wiki.fasterxml.com/JacksonModuleScala) + (Apache License, Version 2.0) jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.13.4.1 - http://github.com/FasterXML/jackson) + (Apache License, Version 2.0) jackson-module-scala (com.fasterxml.jackson.module:jackson-module-scala_2.11:2.13.4 - http://wiki.fasterxml.com/JacksonModuleScala) (Apache License, Version 2.0) javax.inject (javax.inject:javax.inject:1 - http://code.google.com/p/atinject/) (Apache License, Version 2.0) json4s-ast (org.json4s:json4s-ast_2.11:3.5.3 - https://github.com/json4s/json4s) (Apache License, Version 2.0) json4s-core (org.json4s:json4s-core_2.11:3.5.3 - https://github.com/json4s/json4s) @@ -502,7 +507,7 @@ See licenses/ for text of these licenses. (Apache License, version 2.0) chill-java (com.twitter:chill-java:0.7.6 - https://github.com/twitter/chill) (Apache License, version 2.0) chill_2.11 (com.twitter:chill_2.11:0.7.6 - https://github.com/twitter/chill) (Apache License, version 2.0) druid (com.alibaba:druid:1.1.22 - https://github.com/alibaba/druid) - (Apache License, version 2.0) excel-streaming-reader (com.github.pjfanning:excel-streaming-reader:4.0.1 - https://github.com/pjfanning/excel-streaming-reader) + (Apache License, version 2.0) excel-streaming-reader (com.github.pjfanning:excel-streaming-reader:4.0.2 - https://github.com/pjfanning/excel-streaming-reader) (Apache License, version 2.0) failureaccess (com.google.guava:failureaccess:1.0.1) (Apache License, version 2.0) flink-connector-base (org.apache.flink:flink-connector-base:1.12.2 - https://flink.apache.org/) (Apache License, version 2.0) flink-connector-files (org.apache.flink:flink-connector-files:1.12.2 - https://flink.apache.org/) @@ -559,7 +564,7 @@ See licenses/ for text of these licenses. (Apache License, version 2.0) okhttp:Square’s meticulous HTTP client for Java and Kotlin. 
(com.squareup.okhttp:okhttp:2.7.5 - https://square.github.io/okhttp/)
 (Apache License, version 2.0) okio:A modern I/O API for Java (com.squareup.okio:okio:1.6.0 - https://github.com/square/okio/)
 (Apache License, version 2.0) opencsv:A simple library for reading and writing CSV in Java (net.sf.opencsv:opencsv:2.3 - http://opencsv.sf.net)
-    (Apache License, version 2.0) POI Shared Strings Memory efficient Shared Strings Table implementation for POI streaming (com.github.pjfanning:poi-shared-strings:2.5.1 - https://github.com/pjfanning/poi-shared-strings)
+    (Apache License, version 2.0) POI Shared Strings Memory efficient Shared Strings Table implementation for POI streaming (com.github.pjfanning:poi-shared-strings:2.5.4 - https://github.com/pjfanning/poi-shared-strings)
 (Apache License, version 2.0) snappy-java: A fast compression/decompression library (org.xerial.snappy:snappy-java:1.1.7.7 - https://github.com/xerial/snappy-java)
 (Apache License, version 2.0) SparseBitSet: An efficient sparse bitset implementation for Java (com.zaxxer:SparseBitSet:1.2 - https://github.com/brettwooldridge/SparseBitSet)
 (Apache License, version 2.0) Spring Plugin - Core Core plugin infrastructure (org.springframework.plugin:spring-plugin-core:2.0.0.RELEASE - https://github.com/spring-projects/spring-plugin)
@@ -577,7 +582,7 @@ See licenses/ for text of these licenses.
 (Apache License, version 2.0) swagger-annotations (io.swagger:swagger-annotations:1.5.22 - https://github.com/swagger-api)
 (Apache License, version 2.0) swagger-models (io.swagger:swagger-models:1.5.22 - https://github.com/swagger-api)
 (Apache License, version 2.0) token-provider (org.apache.kerby:token-provider:1.0.1)
-    (Apache License, version 2.0) XmlBeans:XmlBeans main jar (org.apache.xmlbeans:xmlbeans:5.0.3 - https://xmlbeans.apache.org/)
+    (Apache License, version 2.0) XmlBeans:XmlBeans main jar (org.apache.xmlbeans:xmlbeans:5.1.1 - https://xmlbeans.apache.org/)
 (Apache License, version 2.0) XStream Core (com.thoughtworks.xstream:xstream:1.4.18)
 (Apache License, version 2.0) Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.4.6 - https://zookeeper.apache.org/)
 (Apache License, version 2.0) Apache twill (org.apache.twill:* - https://twill.apache.org/)
diff --git a/linkis-dist/release-docs/licenses/License-druid.txt b/linkis-dist/release-docs/licenses/LICENSE-druid.txt
similarity index 100%
rename from linkis-dist/release-docs/licenses/License-druid.txt
rename to linkis-dist/release-docs/licenses/LICENSE-druid.txt
diff --git a/linkis-dist/release-docs/licenses/LICENSE-dss-gateway-support.txt b/linkis-dist/release-docs/licenses/LICENSE-dss-gateway-support.txt
new file mode 100644
index 00000000000..261eeb9e9f8
--- /dev/null
+++ b/linkis-dist/release-docs/licenses/LICENSE-dss-gateway-support.txt
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity.
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/linkis-dist/src/main/assembly/distribution.xml b/linkis-dist/src/main/assembly/distribution.xml index c35b3441013..b45e14b4667 100644 --- a/linkis-dist/src/main/assembly/distribution.xml +++ b/linkis-dist/src/main/assembly/distribution.xml @@ -16,7 +16,8 @@ ~ limitations under the License. --> - + dist tar.gz @@ -46,6 +47,16 @@ unix + + docker/ + docker + + **/* + + 0755 + unix + + package/ linkis-package @@ -66,6 +77,26 @@ unix + + docker/ + docker + + **/* + + 0755 + unix + + + + helm/ + helm + + **/* + + 0755 + unix + + ../ @@ -163,7 +194,7 @@ - + ../linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/target/out/lib @@ -176,23 +207,10 @@ - - - - ../linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-server/target/out/lib/ - - - linkis-package/lib/linkis-computation-governance/linkis-cg-engineplugin - - - *.jar - - - - ../linkis-computation-governance/linkis-manager/linkis-application-manager/target/out/lib + ../linkis-computation-governance/linkis-manager/target/out/lib linkis-package/lib/linkis-computation-governance/linkis-cg-linkismanager @@ -240,7 +258,7 @@ linkis-package/lib/linkis-public-enhancements/linkis-ps-publicservice - *.jar + **/* diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/elasticsearch/src/main/assembly/distribution.xml index 2ecd8e0dc92..d5360d46ce1 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/assembly/distribution.xml +++ b/linkis-engineconn-plugins/elasticsearch/src/main/assembly/distribution.xml @@ -16,10 +16,8 @@ ~ limitations under the License. --> - + linkis-engineplugin-elasticsearch dir diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/ElasticSearchEngineConnExecutor.scala b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/ElasticSearchEngineConnExecutor.scala similarity index 97% rename from linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/ElasticSearchEngineConnExecutor.scala rename to linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/ElasticSearchEngineConnExecutor.scala index e99b0aefa30..d204481278b 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/ElasticSearchEngineConnExecutor.scala +++ b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/ElasticSearchEngineConnExecutor.scala @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.engineplugin.elasticsearch.executer +package org.apache.linkis.engineplugin.elasticsearch.executor import org.apache.linkis.common.utils.{Logging, OverloadUtils, Utils} import org.apache.linkis.engineconn.common.conf.{EngineConnConf, EngineConnConstant} @@ -29,13 +29,13 @@ import org.apache.linkis.engineplugin.elasticsearch.conf.{ ElasticSearchConfiguration, ElasticSearchEngineConsoleConf } -import org.apache.linkis.engineplugin.elasticsearch.executer.client.{ +import org.apache.linkis.engineplugin.elasticsearch.executor.client.{ ElasticSearchErrorResponse, ElasticSearchExecutor, ElasticSearchJsonResponse, ElasticSearchTableResponse } -import org.apache.linkis.engineplugin.elasticsearch.executer.client.ElasticSearchErrorResponse +import org.apache.linkis.engineplugin.elasticsearch.executor.client.ElasticSearchErrorResponse import org.apache.linkis.governance.common.entity.ExecutionNodeStatus import org.apache.linkis.manager.common.entity.resource.{ CommonNodeResource, diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/ElasticSearchExecutorOrder.scala b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/ElasticSearchExecutorOrder.scala similarity index 93% rename from linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/ElasticSearchExecutorOrder.scala rename to linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/ElasticSearchExecutorOrder.scala index 29f424d8727..ea49f9fe2d9 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/ElasticSearchExecutorOrder.scala +++ b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/ElasticSearchExecutorOrder.scala @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.engineplugin.elasticsearch.executer +package org.apache.linkis.engineplugin.elasticsearch.executor object ElasticSearchExecutorOrder extends Enumeration { diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/ElasticSearchExecutor.scala b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/ElasticSearchExecutor.scala similarity index 91% rename from linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/ElasticSearchExecutor.scala rename to linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/ElasticSearchExecutor.scala index f4eb778c46e..72e1953e334 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/ElasticSearchExecutor.scala +++ b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/ElasticSearchExecutor.scala @@ -15,10 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.engineplugin.elasticsearch.executer.client +package org.apache.linkis.engineplugin.elasticsearch.executor.client import org.apache.linkis.common.utils.Logging -import org.apache.linkis.engineplugin.elasticsearch.executer.client.impl.ElasticSearchExecutorImpl +import org.apache.linkis.engineplugin.elasticsearch.executor.client.impl.ElasticSearchExecutorImpl import org.apache.linkis.scheduler.executer.ExecuteResponse import java.io.IOException diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/ElasticSearchResponse.scala b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/ElasticSearchResponse.scala similarity index 95% rename from linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/ElasticSearchResponse.scala rename to linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/ElasticSearchResponse.scala index 13a5716e381..b01b1095540 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/ElasticSearchResponse.scala +++ b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/ElasticSearchResponse.scala @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.engineplugin.elasticsearch.executer.client +package org.apache.linkis.engineplugin.elasticsearch.executor.client import org.apache.linkis.storage.domain.Column import org.apache.linkis.storage.resultset.table.TableRecord diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/EsClient.scala b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/EsClient.scala similarity index 98% rename from linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/EsClient.scala rename to linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/EsClient.scala index 5ad65aa0dbf..8b894b4664a 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/EsClient.scala +++ b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/EsClient.scala @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.engineplugin.elasticsearch.executer.client +package org.apache.linkis.engineplugin.elasticsearch.executor.client import org.apache.linkis.common.utils.Utils import org.apache.linkis.engineplugin.elasticsearch.conf.ElasticSearchConfiguration._ diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/EsClientFactory.scala b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/EsClientFactory.scala similarity index 98% rename from linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/EsClientFactory.scala rename to linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/EsClientFactory.scala index eef33242026..c4e3ef17ff9 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/EsClientFactory.scala +++ b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/EsClientFactory.scala @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.engineplugin.elasticsearch.executer.client +package org.apache.linkis.engineplugin.elasticsearch.executor.client import org.apache.linkis.common.conf.CommonVars import org.apache.linkis.engineplugin.elasticsearch.conf.ElasticSearchConfiguration._ diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/ResponseHandler.scala b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/ResponseHandler.scala similarity index 95% rename from linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/ResponseHandler.scala rename to linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/ResponseHandler.scala index 2b331762470..d259ea7b694 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/ResponseHandler.scala +++ b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/ResponseHandler.scala @@ -15,10 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.engineplugin.elasticsearch.executer.client +package org.apache.linkis.engineplugin.elasticsearch.executor.client import org.apache.linkis.common.utils.Logging -import org.apache.linkis.engineplugin.elasticsearch.executer.client.impl.ResponseHandlerImpl +import org.apache.linkis.engineplugin.elasticsearch.executor.client.impl.ResponseHandlerImpl import org.apache.linkis.storage.domain._ import java.util.Locale diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/impl/ElasticSearchExecutorImpl.scala b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/impl/ElasticSearchExecutorImpl.scala similarity index 95% rename from linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/impl/ElasticSearchExecutorImpl.scala rename to linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/impl/ElasticSearchExecutorImpl.scala index 7237145d5bd..8d910c023c4 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/impl/ElasticSearchExecutorImpl.scala +++ b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/impl/ElasticSearchExecutorImpl.scala @@ -15,12 +15,12 @@ * limitations under the License. */ -package org.apache.linkis.engineplugin.elasticsearch.executer.client.impl +package org.apache.linkis.engineplugin.elasticsearch.executor.client.impl import org.apache.linkis.common.utils.Utils import org.apache.linkis.engineplugin.elasticsearch.conf.ElasticSearchConfiguration import org.apache.linkis.engineplugin.elasticsearch.exception.EsConvertResponseException -import org.apache.linkis.engineplugin.elasticsearch.executer.client.{ +import org.apache.linkis.engineplugin.elasticsearch.executor.client.{ ElasticSearchErrorResponse, ElasticSearchExecutor, ElasticSearchResponse, @@ -28,7 +28,7 @@ import org.apache.linkis.engineplugin.elasticsearch.executer.client.{ EsClientFactory, ResponseHandler } -import org.apache.linkis.engineplugin.elasticsearch.executer.client.ResponseHandler +import org.apache.linkis.engineplugin.elasticsearch.executor.client.ResponseHandler import org.apache.linkis.protocol.constants.TaskConstant import org.apache.linkis.scheduler.executer.{ AliasOutputExecuteResponse, diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/impl/ResponseHandlerImpl.scala b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/impl/ResponseHandlerImpl.scala similarity index 95% rename from linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/impl/ResponseHandlerImpl.scala rename to linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/impl/ResponseHandlerImpl.scala index 9371971a2ef..23e6a4cbab1 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executer/client/impl/ResponseHandlerImpl.scala +++ b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/executor/client/impl/ResponseHandlerImpl.scala @@ -15,18 +15,18 @@ * 
limitations under the License. */ -package org.apache.linkis.engineplugin.elasticsearch.executer.client.impl +package org.apache.linkis.engineplugin.elasticsearch.executor.client.impl import org.apache.linkis.common.utils.Utils import org.apache.linkis.engineplugin.elasticsearch.exception.EsConvertResponseException -import org.apache.linkis.engineplugin.elasticsearch.executer.client.{ +import org.apache.linkis.engineplugin.elasticsearch.executor.client.{ ElasticSearchJsonResponse, ElasticSearchResponse, ElasticSearchTableResponse, ResponseHandler } -import org.apache.linkis.engineplugin.elasticsearch.executer.client.ResponseHandler -import org.apache.linkis.engineplugin.elasticsearch.executer.client.ResponseHandler._ +import org.apache.linkis.engineplugin.elasticsearch.executor.client.ResponseHandler +import org.apache.linkis.engineplugin.elasticsearch.executor.client.ResponseHandler._ import org.apache.linkis.storage.domain._ import org.apache.linkis.storage.resultset.table.TableRecord diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/factory/ElasticSearchJsonExecutorFactory.scala b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/factory/ElasticSearchJsonExecutorFactory.scala index c3937d1a73b..efee33041b4 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/factory/ElasticSearchJsonExecutorFactory.scala +++ b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/factory/ElasticSearchJsonExecutorFactory.scala @@ -22,7 +22,7 @@ import org.apache.linkis.engineconn.common.engineconn.EngineConn import org.apache.linkis.engineconn.computation.executor.creation.ComputationExecutorFactory import org.apache.linkis.engineconn.computation.executor.execute.ComputationExecutor import org.apache.linkis.engineplugin.elasticsearch.conf.ElasticSearchConfiguration -import org.apache.linkis.engineplugin.elasticsearch.executer.ElasticSearchEngineConnExecutor +import org.apache.linkis.engineplugin.elasticsearch.executor.ElasticSearchEngineConnExecutor import org.apache.linkis.governance.common.paser.JsonCodeParser import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.engine.RunType diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/factory/ElasticSearchSqlExecutorFactory.scala b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/factory/ElasticSearchSqlExecutorFactory.scala index 63496b64d6e..a32d6c93795 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/factory/ElasticSearchSqlExecutorFactory.scala +++ b/linkis-engineconn-plugins/elasticsearch/src/main/scala/org/apache/linkis/engineplugin/elasticsearch/factory/ElasticSearchSqlExecutorFactory.scala @@ -23,7 +23,7 @@ import org.apache.linkis.engineconn.computation.executor.creation.ComputationExe import org.apache.linkis.engineconn.computation.executor.execute.ComputationExecutor import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineplugin.elasticsearch.conf.ElasticSearchConfiguration -import org.apache.linkis.engineplugin.elasticsearch.executer.ElasticSearchEngineConnExecutor +import org.apache.linkis.engineplugin.elasticsearch.executor.ElasticSearchEngineConnExecutor import 
org.apache.linkis.governance.common.paser.SQLCodeParser import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.engine.RunType diff --git a/linkis-engineconn-plugins/elasticsearch/src/test/scala/org/apache/linkis/engineplugin/elasticsearch/TestElasticSearchEngineConnPlugin.scala b/linkis-engineconn-plugins/elasticsearch/src/test/scala/org/apache/linkis/engineplugin/elasticsearch/TestElasticSearchEngineConnPlugin.scala new file mode 100644 index 00000000000..4b854d9cf3d --- /dev/null +++ b/linkis-engineconn-plugins/elasticsearch/src/test/scala/org/apache/linkis/engineplugin/elasticsearch/TestElasticSearchEngineConnPlugin.scala @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineplugin.elasticsearch + +import org.junit.jupiter.api.{Assertions, Test} + +class TestElasticSearchEngineConnPlugin { + + @Test + def testGetEngineResourceFactory: Unit = { + val elasticSearchEngineConnPlugin = new ElasticSearchEngineConnPlugin + elasticSearchEngineConnPlugin.init(null) + Assertions.assertNotNull(elasticSearchEngineConnPlugin.getEngineConnFactory) + Assertions.assertNotNull(elasticSearchEngineConnPlugin.getEngineConnLaunchBuilder) + Assertions.assertNotNull(elasticSearchEngineConnPlugin.getEngineResourceFactory) + Assertions.assertNotNull(elasticSearchEngineConnPlugin.getDefaultLabels) + } + +} diff --git a/linkis-engineconn-plugins/flink/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/flink/src/main/assembly/distribution.xml index 35eba4025ae..222f964c52a 100644 --- a/linkis-engineconn-plugins/flink/src/main/assembly/distribution.xml +++ b/linkis-engineconn-plugins/flink/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - + + linkis-engineconn-plugin-flink dir diff --git a/linkis-engineconn-plugins/hive/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/hive/src/main/assembly/distribution.xml index f06614da222..c9e84398eae 100644 --- a/linkis-engineconn-plugins/hive/src/main/assembly/distribution.xml +++ b/linkis-engineconn-plugins/hive/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - - + + linkis-engineplugin-hive dir diff --git a/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/errorcode/HiveErrorCodeSummary.java b/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/errorcode/HiveErrorCodeSummary.java new file mode 100644 index 00000000000..6c12583f090 --- /dev/null +++ b/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/errorcode/HiveErrorCodeSummary.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineplugin.hive.errorcode; + +import org.apache.linkis.common.errorcode.ErrorCodeUtils; + +public enum HiveErrorCodeSummary { + /** + * 10000-10999 linkis-frame 11000-12999 linkis-commons 13000-14999 linkis-spring-cloud-services + * 15000-19999 linkis-public-enhancements 20000-24999 linkis-computation-governance 25000-25999 + * linkis-extensions 26000-29999 linkis-engineconn-plugins + */ + CREATE_HIVE_EXECUTOR_ERROR( + 26040, + "failed to create hive executor(创建hive执行器失败)", + "failed to create hive executor(创建hive执行器失败)", + "hiveEngine"), + HIVE_EXEC_JAR_ERROR( + 26041, + "cannot find hive-exec.jar, start session failed(找不到 hive-exec.jar,启动会话失败)", + "cannot find hive-exec.jar, start session failed(找不到 hive-exec.jar,启动会话失败)", + "hiveEngine"), + GET_FIELD_SCHEMAS_ERROR( + 26042, + "cannot get the field schemas(无法获取字段模式)", + "cannot get the field schemas(无法获取字段模式)", + "hiveEngine"), + INVALID_VALUE(26043, "invalid value(无效值)", "invalid value(无效值)", "hiveEngine"); + + private int errorCode; + + private String errorDesc; + + private String comment; + + private String module; + + HiveErrorCodeSummary(int errorCode, String errorDesc, String comment, String module) { + ErrorCodeUtils.validateErrorCode(errorCode, 26000, 29999); + this.errorCode = errorCode; + this.errorDesc = errorDesc; + this.comment = comment; + this.module = module; + } + + public int getErrorCode() { + return errorCode; + } + + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + public String getModule() { + return module; + } + + public void setModule(String module) { + this.module = module; + } + + @Override + public String toString() { + return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc; + } +} diff --git a/linkis-engineconn-plugins/hive/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/hive/src/main/resources/log4j2.xml index 910e15b97ea..d490defe93b 100644 --- 
a/linkis-engineconn-plugins/hive/src/main/resources/log4j2.xml +++ b/linkis-engineconn-plugins/hive/src/main/resources/log4j2.xml @@ -18,8 +18,8 @@ - + @@ -27,7 +27,7 @@ - + diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala index d4a78c5ba66..9a46975979c 100644 --- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala +++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala @@ -25,6 +25,8 @@ import org.apache.linkis.engineconn.executor.entity.LabelExecutor import org.apache.linkis.engineplugin.hive.common.HiveUtils import org.apache.linkis.engineplugin.hive.conf.HiveEngineConfiguration import org.apache.linkis.engineplugin.hive.entity.HiveSession +import org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.CREATE_HIVE_EXECUTOR_ERROR +import org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.HIVE_EXEC_JAR_ERROR import org.apache.linkis.engineplugin.hive.exception.HiveSessionStartFailedException import org.apache.linkis.engineplugin.hive.executor.HiveEngineConnExecutor import org.apache.linkis.hadoop.common.utils.HDFSUtils @@ -62,7 +64,10 @@ class HiveEngineConnFactory extends ComputationSingleExecutorEngineConnFactory w hiveSession.baos ) case _ => - throw HiveSessionStartFailedException(40012, "Failed to create hive executor") + throw HiveSessionStartFailedException( + CREATE_HIVE_EXECUTOR_ERROR.getErrorCode, + CREATE_HIVE_EXECUTOR_ERROR.getErrorDesc + ) } } @@ -77,8 +82,8 @@ class HiveEngineConnFactory extends ComputationSingleExecutorEngineConnFactory w .jarOfClass(classOf[Driver]) .getOrElse( throw HiveSessionStartFailedException( - 40012, - "cannot find hive-exec.jar, start session failed!" 
+ HIVE_EXEC_JAR_ERROR.getErrorCode, + HIVE_EXEC_JAR_ERROR.getErrorDesc ) ) ) diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/exception/NotSupportedHiveTypeException.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/exception/NotSupportedHiveTypeException.scala index 84e617e10eb..8416ae5e9d9 100644 --- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/exception/NotSupportedHiveTypeException.scala +++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/exception/NotSupportedHiveTypeException.scala @@ -19,20 +19,8 @@ package org.apache.linkis.engineplugin.hive.exception import org.apache.linkis.common.exception.ErrorException -case class NotSupportedHiveTypeException(errCode: Int, desc: String) - extends ErrorException(errCode, desc) {} - -case class HadoopConfSetFailedException(errCode: Int, desc: String) - extends ErrorException(errCode, desc) {} - case class HiveSessionStartFailedException(erroCode: Int, desc: String) - extends ErrorException(erroCode, desc) {} + extends ErrorException(erroCode, desc) -/** - * @param erroCode - * 41004 - * @param desc - * hive query fail - */ case class HiveQueryFailedException(erroCode: Int, desc: String) - extends ErrorException(erroCode, desc) {} + extends ErrorException(erroCode, desc) diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala index f1bc2091445..7a1d119b0eb 100644 --- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala +++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala @@ -28,6 +28,7 @@ import org.apache.linkis.engineconn.core.EngineConnObject import org.apache.linkis.engineconn.executor.entity.ResourceFetchExecutor import org.apache.linkis.engineplugin.hive.conf.{Counters, HiveEngineConfiguration} import org.apache.linkis.engineplugin.hive.cs.CSHiveHelper +import org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.GET_FIELD_SCHEMAS_ERROR import org.apache.linkis.engineplugin.hive.exception.HiveQueryFailedException import org.apache.linkis.engineplugin.hive.progress.HiveProgressHelper import org.apache.linkis.governance.common.paser.SQLCodeParser @@ -234,7 +235,11 @@ class HiveEngineConnExecutor( val fieldSchemas = if (hiveResponse.getSchema != null) hiveResponse.getSchema.getFieldSchemas else if (driver.getSchema != null) driver.getSchema.getFieldSchemas - else throw HiveQueryFailedException(41005, "cannot get the field schemas.") + else + throw HiveQueryFailedException( + GET_FIELD_SCHEMAS_ERROR.getErrorCode, + GET_FIELD_SCHEMAS_ERROR.getErrorDesc + ) LOG.debug("fieldSchemas are " + fieldSchemas) if (fieldSchemas == null || isNoResultSql(realCode)) { diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/hook/HiveAddMetaTableNameHook.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/hook/HiveAddMetaTableNameHook.scala index 85c538c8d33..8720e527f20 100644 --- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/hook/HiveAddMetaTableNameHook.scala +++ 
b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/hook/HiveAddMetaTableNameHook.scala @@ -21,6 +21,7 @@ import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook +import org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.INVALID_VALUE import org.apache.linkis.engineplugin.hive.exception.HiveQueryFailedException import org.apache.commons.lang3.StringUtils @@ -78,8 +79,8 @@ class HiveAddMetaTableNameHook extends ComputationExecutorHook with Logging { engineExecutionContext.setEnableResultsetMetaWithTableName(boolValue) } { case e: IllegalArgumentException => throw HiveQueryFailedException( - 41006, - s"Invalid value : ${value} in param [${mather.group()}]" + INVALID_VALUE.getErrorCode, + INVALID_VALUE.getErrorDesc.concat(s" : ${value} in param [${mather.group()}]") ) } } diff --git a/linkis-engineconn-plugins/hive/src/test/java/org/apache/linkis/common/errorcode/HiveErrorCodeSummaryTest.java b/linkis-engineconn-plugins/hive/src/test/java/org/apache/linkis/common/errorcode/HiveErrorCodeSummaryTest.java new file mode 100644 index 00000000000..9ebb36d6af1 --- /dev/null +++ b/linkis-engineconn-plugins/hive/src/test/java/org/apache/linkis/common/errorcode/HiveErrorCodeSummaryTest.java @@ -0,0 +1,166 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.common.errorcode; + +import org.junit.jupiter.api.Test; + +import static org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.CREATE_HIVE_EXECUTOR_ERROR; +import static org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.GET_FIELD_SCHEMAS_ERROR; +import static org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.HIVE_EXEC_JAR_ERROR; +import static org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.INVALID_VALUE; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class HiveErrorCodeSummaryTest { + @Test + void testGetErrorCode() { + assertEquals(26040, CREATE_HIVE_EXECUTOR_ERROR.getErrorCode()); + assertEquals(26041, HIVE_EXEC_JAR_ERROR.getErrorCode()); + assertEquals(26042, GET_FIELD_SCHEMAS_ERROR.getErrorCode()); + assertEquals(26043, INVALID_VALUE.getErrorCode()); + } + + @Test + void testSetErrorCode() { + CREATE_HIVE_EXECUTOR_ERROR.setErrorCode(1); + assertEquals(1, CREATE_HIVE_EXECUTOR_ERROR.getErrorCode()); + CREATE_HIVE_EXECUTOR_ERROR.setErrorCode(26040); + assertEquals(26040, CREATE_HIVE_EXECUTOR_ERROR.getErrorCode()); + + HIVE_EXEC_JAR_ERROR.setErrorCode(1); + assertEquals(1, HIVE_EXEC_JAR_ERROR.getErrorCode()); + HIVE_EXEC_JAR_ERROR.setErrorCode(26041); + assertEquals(26041, HIVE_EXEC_JAR_ERROR.getErrorCode()); + + GET_FIELD_SCHEMAS_ERROR.setErrorCode(1); + assertEquals(1, GET_FIELD_SCHEMAS_ERROR.getErrorCode()); + GET_FIELD_SCHEMAS_ERROR.setErrorCode(26042); + assertEquals(26042, GET_FIELD_SCHEMAS_ERROR.getErrorCode()); + + INVALID_VALUE.setErrorCode(1); + assertEquals(1, INVALID_VALUE.getErrorCode()); + INVALID_VALUE.setErrorCode(26043); + assertEquals(26043, INVALID_VALUE.getErrorCode()); + } + + @Test + void testGetErrorDesc() { + assertEquals( + "failed to create hive executor(创建hive执行器失败)", CREATE_HIVE_EXECUTOR_ERROR.getErrorDesc()); + assertEquals( + "cannot find hive-exec.jar, start session failed(找不到 hive-exec.jar,启动会话失败)", + HIVE_EXEC_JAR_ERROR.getErrorDesc()); + assertEquals("cannot get the field schemas(无法获取字段模式)", GET_FIELD_SCHEMAS_ERROR.getErrorDesc()); + assertEquals("invalid value(无效值)", INVALID_VALUE.getErrorDesc()); + } + + @Test + void testSetErrorDesc() { + CREATE_HIVE_EXECUTOR_ERROR.setErrorDesc("test"); + assertEquals("test", CREATE_HIVE_EXECUTOR_ERROR.getErrorDesc()); + CREATE_HIVE_EXECUTOR_ERROR.setErrorDesc("failed to create hive executor(创建hive执行器失败)"); + assertEquals( + "failed to create hive executor(创建hive执行器失败)", CREATE_HIVE_EXECUTOR_ERROR.getErrorDesc()); + + HIVE_EXEC_JAR_ERROR.setErrorDesc("test"); + assertEquals("test", HIVE_EXEC_JAR_ERROR.getErrorDesc()); + HIVE_EXEC_JAR_ERROR.setErrorDesc( + "cannot find hive-exec.jar, start session failed(找不到 hive-exec.jar,启动会话失败)"); + assertEquals( + "cannot find hive-exec.jar, start session failed(找不到 hive-exec.jar,启动会话失败)", + HIVE_EXEC_JAR_ERROR.getErrorDesc()); + + GET_FIELD_SCHEMAS_ERROR.setErrorDesc("test"); + assertEquals("test", GET_FIELD_SCHEMAS_ERROR.getErrorDesc()); + GET_FIELD_SCHEMAS_ERROR.setErrorDesc("cannot get the field schemas(无法获取字段模式)"); + assertEquals("cannot get the field schemas(无法获取字段模式)", GET_FIELD_SCHEMAS_ERROR.getErrorDesc()); + + INVALID_VALUE.setErrorDesc("test"); + assertEquals("test", INVALID_VALUE.getErrorDesc()); + INVALID_VALUE.setErrorDesc("invalid value(无效值)"); + assertEquals("invalid value(无效值)", INVALID_VALUE.getErrorDesc()); + } + + @Test + void testGetComment() { + assertEquals( + "failed to create hive executor(创建hive执行器失败)", 
CREATE_HIVE_EXECUTOR_ERROR.getComment()); + assertEquals( + "cannot find hive-exec.jar, start session failed(找不到 hive-exec.jar,启动会话失败)", + HIVE_EXEC_JAR_ERROR.getComment()); + assertEquals("cannot get the field schemas(无法获取字段模式)", GET_FIELD_SCHEMAS_ERROR.getComment()); + assertEquals("invalid value(无效值)", INVALID_VALUE.getComment()); + } + + @Test + void testSetComment() { + CREATE_HIVE_EXECUTOR_ERROR.setComment("test"); + assertEquals("test", CREATE_HIVE_EXECUTOR_ERROR.getComment()); + CREATE_HIVE_EXECUTOR_ERROR.setComment("failed to create hive executor(创建hive执行器失败)"); + assertEquals( + "failed to create hive executor(创建hive执行器失败)", CREATE_HIVE_EXECUTOR_ERROR.getComment()); + + HIVE_EXEC_JAR_ERROR.setComment("test"); + assertEquals("test", HIVE_EXEC_JAR_ERROR.getComment()); + HIVE_EXEC_JAR_ERROR.setComment( + "cannot find hive-exec.jar, start session failed(找不到 hive-exec.jar,启动会话失败)"); + assertEquals( + "cannot find hive-exec.jar, start session failed(找不到 hive-exec.jar,启动会话失败)", + HIVE_EXEC_JAR_ERROR.getComment()); + + GET_FIELD_SCHEMAS_ERROR.setComment("test"); + assertEquals("test", GET_FIELD_SCHEMAS_ERROR.getComment()); + GET_FIELD_SCHEMAS_ERROR.setComment("cannot get the field schemas(无法获取字段模式)"); + assertEquals("cannot get the field schemas(无法获取字段模式)", GET_FIELD_SCHEMAS_ERROR.getComment()); + + INVALID_VALUE.setComment("test"); + assertEquals("test", INVALID_VALUE.getComment()); + INVALID_VALUE.setComment("invalid value(无效值)"); + assertEquals("invalid value(无效值)", INVALID_VALUE.getComment()); + } + + @Test + void testGetModule() { + assertEquals("hiveEngine", CREATE_HIVE_EXECUTOR_ERROR.getModule()); + assertEquals("hiveEngine", HIVE_EXEC_JAR_ERROR.getModule()); + assertEquals("hiveEngine", GET_FIELD_SCHEMAS_ERROR.getModule()); + assertEquals("hiveEngine", INVALID_VALUE.getModule()); + } + + @Test + void testSetModule() { + CREATE_HIVE_EXECUTOR_ERROR.setModule("test"); + assertEquals("test", CREATE_HIVE_EXECUTOR_ERROR.getModule()); + CREATE_HIVE_EXECUTOR_ERROR.setModule("hiveEngine"); + assertEquals("hiveEngine", CREATE_HIVE_EXECUTOR_ERROR.getModule()); + + HIVE_EXEC_JAR_ERROR.setModule("test"); + assertEquals("test", HIVE_EXEC_JAR_ERROR.getModule()); + HIVE_EXEC_JAR_ERROR.setModule("hiveEngine"); + assertEquals("hiveEngine", HIVE_EXEC_JAR_ERROR.getModule()); + + GET_FIELD_SCHEMAS_ERROR.setModule("test"); + assertEquals("test", GET_FIELD_SCHEMAS_ERROR.getModule()); + GET_FIELD_SCHEMAS_ERROR.setModule("hiveEngine"); + assertEquals("hiveEngine", GET_FIELD_SCHEMAS_ERROR.getModule()); + + INVALID_VALUE.setModule("test"); + assertEquals("test", INVALID_VALUE.getModule()); + INVALID_VALUE.setModule("hiveEngine"); + assertEquals("hiveEngine", INVALID_VALUE.getModule()); + } +} diff --git a/linkis-engineconn-plugins/io_file/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/io_file/src/main/assembly/distribution.xml index 04adcab9d35..00ee58ad322 100644 --- a/linkis-engineconn-plugins/io_file/src/main/assembly/distribution.xml +++ b/linkis-engineconn-plugins/io_file/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
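A minimal Scala sketch, not part of the patch, of how the new HiveErrorCodeSummary entries are consumed; the object and method names are illustrative, while the exception and enum accessors are the ones introduced above:

import org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.CREATE_HIVE_EXECUTOR_ERROR
import org.apache.linkis.engineplugin.hive.exception.HiveSessionStartFailedException

object HiveErrorCodeUsageSketch {
  // One enum entry now carries both the numeric code and the bilingual
  // message, replacing the former magic number 40012 in the factory.
  def failExecutorCreation(): Nothing =
    throw HiveSessionStartFailedException(
      CREATE_HIVE_EXECUTOR_ERROR.getErrorCode,
      CREATE_HIVE_EXECUTOR_ERROR.getErrorDesc
    )
}

Because HiveSessionStartFailedException is a case class, no `new` is needed; this mirrors the HiveEngineConnFactory hunk above exactly.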
--> - - + + linkis-engineplugin-io_file dir diff --git a/linkis-engineconn-plugins/io_file/src/main/scala/org/apache/linkis/manager/engineplugin/io/executor/IoEngineConnExecutor.scala b/linkis-engineconn-plugins/io_file/src/main/scala/org/apache/linkis/manager/engineplugin/io/executor/IoEngineConnExecutor.scala index 874fc1630b3..90e0a0e029f 100644 --- a/linkis-engineconn-plugins/io_file/src/main/scala/org/apache/linkis/manager/engineplugin/io/executor/IoEngineConnExecutor.scala +++ b/linkis-engineconn-plugins/io_file/src/main/scala/org/apache/linkis/manager/engineplugin/io/executor/IoEngineConnExecutor.scala @@ -43,6 +43,11 @@ import org.apache.linkis.scheduler.executer.{ } import org.apache.linkis.storage.FSFactory import org.apache.linkis.storage.domain.{MethodEntity, MethodEntitySerializer} +import org.apache.linkis.storage.errorcode.LinkisIoFileErrorCodeSummary.{ + FS_CAN_NOT_PROXY_TO, + NOT_EXISTS_METHOD, + PARAMETER_CALLS +} import org.apache.linkis.storage.exception.{StorageErrorCode, StorageErrorException} import org.apache.linkis.storage.fs.FileSystem import org.apache.linkis.storage.utils.StorageUtils @@ -137,7 +142,10 @@ class IoEngineConnExecutor(val id: Int, val outputLimit: Int = 10) case "available" => val fs = getUserFS(method) if (method.params == null || method.params.length != 2) { - throw new StorageErrorException(53003, "Unsupported parameter calls") + throw new StorageErrorException( + PARAMETER_CALLS.getErrorCode, + PARAMETER_CALLS.getErrorDesc + ) } val dest = MethodEntitySerializer.deserializerToJavaObject( method.params(0).asInstanceOf[String], @@ -159,7 +167,10 @@ class IoEngineConnExecutor(val id: Int, val outputLimit: Int = 10) case "renameTo" => val fs = getUserFS(method) if (method.params == null || method.params.length != 2) { - throw new StorageErrorException(53003, "Unsupported parameter calls") + throw new StorageErrorException( + PARAMETER_CALLS.getErrorCode, + PARAMETER_CALLS.getErrorDesc + ) } fs.renameTo( MethodEntitySerializer @@ -172,7 +183,10 @@ class IoEngineConnExecutor(val id: Int, val outputLimit: Int = 10) SuccessExecuteResponse() case "list" => if (method.params == null || method.params.length != 1) { - throw new StorageErrorException(53003, "Unsupported parameter calls") + throw new StorageErrorException( + PARAMETER_CALLS.getErrorCode, + PARAMETER_CALLS.getErrorDesc + ) } val fs = getUserFS(method) val dest = MethodEntitySerializer.deserializerToJavaObject( @@ -187,7 +201,10 @@ class IoEngineConnExecutor(val id: Int, val outputLimit: Int = 10) ) case "listPathWithError" => if (method.params == null || method.params.length != 1) { - throw new StorageErrorException(53003, "Unsupported parameter calls") + throw new StorageErrorException( + PARAMETER_CALLS.getErrorCode, + PARAMETER_CALLS.getErrorDesc + ) } val fs = getUserFS(method).asInstanceOf[FileSystem] val dest = MethodEntitySerializer.deserializerToJavaObject( @@ -305,7 +322,10 @@ class IoEngineConnExecutor(val id: Int, val outputLimit: Int = 10) val properties = methodEntity.params(0).asInstanceOf[Map[String, String]] val proxyUser = methodEntity.proxyUser if (!fsProxyService.canProxyUser(methodEntity.creatorUser, proxyUser, methodEntity.fsType)) { - throw new StorageErrorException(52002, s"FS Can not proxy to:$proxyUser") + throw new StorageErrorException( + FS_CAN_NOT_PROXY_TO.getErrorCode, + s"FS Can not proxy to:$proxyUser" + ) } if (!userFSInfos.containsKey(proxyUser)) { userFSInfos synchronized { @@ -361,7 +381,7 @@ class IoEngineConnExecutor(val id: Int, val outputLimit: Int = 
10) .find(_.getGenericParameterTypes.length == parameterSize) if (realMethod.isEmpty) { throw new StorageErrorException( - 53003, + NOT_EXISTS_METHOD.getErrorCode, s"not exists method $methodName in fs ${fs.getClass.getSimpleName}." ) } diff --git a/linkis-engineconn-plugins/io_file/src/main/scala/org/apache/linkis/manager/engineplugin/io/utils/IOHelp.scala b/linkis-engineconn-plugins/io_file/src/main/scala/org/apache/linkis/manager/engineplugin/io/utils/IOHelp.scala index b4dcc64f5c4..b5e181cc336 100644 --- a/linkis-engineconn-plugins/io_file/src/main/scala/org/apache/linkis/manager/engineplugin/io/utils/IOHelp.scala +++ b/linkis-engineconn-plugins/io_file/src/main/scala/org/apache/linkis/manager/engineplugin/io/utils/IOHelp.scala @@ -20,6 +20,10 @@ package org.apache.linkis.manager.engineplugin.io.utils import org.apache.linkis.common.io.{Fs, FsPath} import org.apache.linkis.common.utils.Utils import org.apache.linkis.storage.domain.{MethodEntity, MethodEntitySerializer} +import org.apache.linkis.storage.errorcode.LinkisIoFileErrorCodeSummary.{ + CANNOT_BE_EMPTY, + PARAMETER_CALLS +} import org.apache.linkis.storage.exception.StorageErrorException import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetReader, ResultSetWriter} import org.apache.linkis.storage.resultset.io.{IOMetaData, IORecord} @@ -39,10 +43,7 @@ object IOHelp { */ def read(fs: Fs, method: MethodEntity): String = { if (method.params == null || method.params.isEmpty) { - throw new StorageErrorException( - 53002, - "The read method parameter cannot be empty(read方法参数不能为空)" - ) + throw new StorageErrorException(CANNOT_BE_EMPTY.getErrorCode, CANNOT_BE_EMPTY.getErrorDesc) } val dest = MethodEntitySerializer.deserializerToJavaObject( method.params(0).asInstanceOf[String], @@ -73,7 +74,8 @@ object IOHelp { writer.addMetaData(ioMetaData) writer.addRecord(ioRecord) writer.toString() - } else throw new StorageErrorException(53003, "Unsupported parameter call(不支持的参数调用)") + } else + throw new StorageErrorException(PARAMETER_CALLS.getErrorCode, PARAMETER_CALLS.getErrorDesc) }(IOUtils.closeQuietly(inputStream)) } @@ -84,7 +86,7 @@ object IOHelp { */ def write(fs: Fs, method: MethodEntity): Unit = { if (method.params == null || method.params.isEmpty) { - throw new StorageErrorException(53003, "Unsupported parameter call(不支持的参数调用)") + throw new StorageErrorException(PARAMETER_CALLS.getErrorCode, PARAMETER_CALLS.getErrorDesc) } val dest = MethodEntitySerializer.deserializerToJavaObject( method.params(0).asInstanceOf[String], diff --git a/linkis-engineconn-plugins/jdbc/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/jdbc/src/main/assembly/distribution.xml index 573caf889aa..2083703a3f0 100644 --- a/linkis-engineconn-plugins/jdbc/src/main/assembly/distribution.xml +++ b/linkis-engineconn-plugins/jdbc/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - + + linkis-manager-enginePlugin-jdbc dir diff --git a/linkis-engineconn-plugins/openlookeng/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/openlookeng/src/main/assembly/distribution.xml index 60c9dfaaba2..b0219d07d01 100644 --- a/linkis-engineconn-plugins/openlookeng/src/main/assembly/distribution.xml +++ b/linkis-engineconn-plugins/openlookeng/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
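The IoEngineConnExecutor hunks above repeat one validation shape across the fs methods; a minimal sketch using only the names the patch imports (the helper object itself is hypothetical):

import org.apache.linkis.storage.errorcode.LinkisIoFileErrorCodeSummary.PARAMETER_CALLS
import org.apache.linkis.storage.exception.StorageErrorException

object IoParamCheckSketch {
  // Each fs method now validates its argument count the same way,
  // raising the shared PARAMETER_CALLS entry instead of a literal 53003.
  def requireParams(params: Array[AnyRef], expected: Int): Unit =
    if (params == null || params.length != expected) {
      throw new StorageErrorException(PARAMETER_CALLS.getErrorCode, PARAMETER_CALLS.getErrorDesc)
    }
}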
--> - - + + linkis-engineplugin-openlookeng dir diff --git a/linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/errorcode/OpenLooKengErrorCodeSummary.java b/linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/errorcode/OpenLooKengErrorCodeSummary.java new file mode 100644 index 00000000000..1971eee0959 --- /dev/null +++ b/linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/errorcode/OpenLooKengErrorCodeSummary.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineplugin.openlookeng.errorcode; + +import org.apache.linkis.common.errorcode.ErrorCodeUtils; + +public enum OpenLooKengErrorCodeSummary { + /** + * 10000-10999 linkis-frame 11000-12999 linkis-commons 13000-14999 linkis-spring-cloud-services + * 15000-19999 linkis-public-enhancements 20000-24999 linkis-computation-governance 25000-25999 + * linkis-extensions 26000-29999 linkis-engineconn-plugins + */ + OPENLOOKENG_CLIENT_ERROR( + 26030, + "openlookeng client error(openlookeng客户端异常)", + "openlookeng client is abnormal due to some circumstances(openlookeng client由于某些情况异常)", + "jdbcEngineConnExecutor"), + + OPENLOOKENG_STATUS_ERROR( + 26031, + "openlookeng status error,Statement is not finished(openlookeng状态异常, 查询语句未完成)", + "The status of openlookeng is abnormal, and the query statement cannot be executed and ended(openlookeng状态出现异常,查询语句无法执行结束)", + "jdbcEngineConnExecutor"); + + private int errorCode; + + private String errorDesc; + + private String comment; + + private String module; + + OpenLooKengErrorCodeSummary(int errorCode, String errorDesc, String comment, String module) { + ErrorCodeUtils.validateErrorCode(errorCode, 26000, 29999); + this.errorCode = errorCode; + this.errorDesc = errorDesc; + this.comment = comment; + this.module = module; + } + + public int getErrorCode() { + return errorCode; + } + + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + public String getModule() { + return module; + } + + public void setModule(String module) { + this.module = module; + } + + @Override + public String toString() { + return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc; + } +} diff --git a/linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/executor/OpenLooKengEngineConnExecutor.java 
b/linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/executor/OpenLooKengEngineConnExecutor.java index 994ff2a5077..d964195c061 100644 --- a/linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/executor/OpenLooKengEngineConnExecutor.java +++ b/linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/executor/OpenLooKengEngineConnExecutor.java @@ -80,6 +80,8 @@ import static org.apache.linkis.engineplugin.openlookeng.conf.OpenLooKengConfiguration.OPENLOOKENG_HTTP_CONNECT_TIME_OUT; import static org.apache.linkis.engineplugin.openlookeng.conf.OpenLooKengConfiguration.OPENLOOKENG_HTTP_READ_TIME_OUT; +import static org.apache.linkis.engineplugin.openlookeng.errorcode.OpenLooKengErrorCodeSummary.OPENLOOKENG_CLIENT_ERROR; +import static org.apache.linkis.engineplugin.openlookeng.errorcode.OpenLooKengErrorCodeSummary.OPENLOOKENG_STATUS_ERROR; public class OpenLooKengEngineConnExecutor extends ConcurrentComputationExecutor { @@ -375,10 +377,11 @@ private ErrorExecuteResponse verifyServerError( } else if (statement.isClientAborted()) { LOG.warn("openlookeng statement is killed."); } else if (statement.isClientError()) { - throw new OpenLooKengClientException(60001, "openlookeng client error."); + throw new OpenLooKengClientException( + OPENLOOKENG_CLIENT_ERROR.getErrorCode(), OPENLOOKENG_CLIENT_ERROR.getErrorDesc()); } else { throw new OpenLooKengStateInvalidException( - 60002, "openlookeng status error. Statement is not finished."); + OPENLOOKENG_STATUS_ERROR.getErrorCode(), OPENLOOKENG_STATUS_ERROR.getErrorDesc()); } return null; } diff --git a/linkis-engineconn-plugins/openlookeng/src/test/java/org/apache/linkis/engineplugin/openlookeng/errorcode/OpenLooKengErrorCodeSummaryTest.java b/linkis-engineconn-plugins/openlookeng/src/test/java/org/apache/linkis/engineplugin/openlookeng/errorcode/OpenLooKengErrorCodeSummaryTest.java new file mode 100644 index 00000000000..a76a5e99dd3 --- /dev/null +++ b/linkis-engineconn-plugins/openlookeng/src/test/java/org/apache/linkis/engineplugin/openlookeng/errorcode/OpenLooKengErrorCodeSummaryTest.java @@ -0,0 +1,119 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineplugin.openlookeng.errorcode; + +import org.junit.jupiter.api.Test; + +import static org.apache.linkis.engineplugin.openlookeng.errorcode.OpenLooKengErrorCodeSummary.OPENLOOKENG_CLIENT_ERROR; +import static org.apache.linkis.engineplugin.openlookeng.errorcode.OpenLooKengErrorCodeSummary.OPENLOOKENG_STATUS_ERROR; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class OpenLooKengErrorCodeSummaryTest { + @Test + void testGetErrorCode() { + assertEquals(26030, OPENLOOKENG_CLIENT_ERROR.getErrorCode()); + assertEquals(26031, OPENLOOKENG_STATUS_ERROR.getErrorCode()); + } + + @Test + void testSetErrorCode() { + OPENLOOKENG_CLIENT_ERROR.setErrorCode(1); + assertEquals(1, OPENLOOKENG_CLIENT_ERROR.getErrorCode()); + OPENLOOKENG_CLIENT_ERROR.setErrorCode(26030); + assertEquals(26030, OPENLOOKENG_CLIENT_ERROR.getErrorCode()); + + OPENLOOKENG_STATUS_ERROR.setErrorCode(1); + assertEquals(1, OPENLOOKENG_STATUS_ERROR.getErrorCode()); + OPENLOOKENG_STATUS_ERROR.setErrorCode(26031); + assertEquals(26031, OPENLOOKENG_STATUS_ERROR.getErrorCode()); + } + + @Test + void testGetErrorDesc() { + assertEquals( + "openlookeng client error(openlookeng客户端异常)", OPENLOOKENG_CLIENT_ERROR.getErrorDesc()); + assertEquals( + "openlookeng status error,Statement is not finished(openlookeng状态异常, 查询语句未完成)", + OPENLOOKENG_STATUS_ERROR.getErrorDesc()); + } + + @Test + void testSetErrorDesc() { + OPENLOOKENG_CLIENT_ERROR.setErrorDesc("test"); + assertEquals("test", OPENLOOKENG_CLIENT_ERROR.getErrorDesc()); + OPENLOOKENG_CLIENT_ERROR.setErrorDesc("openlookeng client error(openlookeng客户端异常)"); + assertEquals( + "openlookeng client error(openlookeng客户端异常)", OPENLOOKENG_CLIENT_ERROR.getErrorDesc()); + + OPENLOOKENG_STATUS_ERROR.setErrorDesc("test"); + assertEquals("test", OPENLOOKENG_STATUS_ERROR.getErrorDesc()); + OPENLOOKENG_STATUS_ERROR.setErrorDesc( + "openlookeng status error,Statement is not finished(openlookeng状态异常, 查询语句未完成)"); + assertEquals( + "openlookeng status error,Statement is not finished(openlookeng状态异常, 查询语句未完成)", + OPENLOOKENG_STATUS_ERROR.getErrorDesc()); + } + + @Test + void testGetComment() { + assertEquals( + "openlookeng client is abnormal due to some circumstances(openlookeng client由于某些情况异常)", + OPENLOOKENG_CLIENT_ERROR.getComment()); + assertEquals( + "The status of openlookeng is abnormal, and the query statement cannot be executed and ended(openlookeng状态出现异常,查询语句无法执行结束)", + OPENLOOKENG_STATUS_ERROR.getComment()); + } + + @Test + void testSetComment() { + OPENLOOKENG_CLIENT_ERROR.setComment("test"); + assertEquals("test", OPENLOOKENG_CLIENT_ERROR.getComment()); + OPENLOOKENG_CLIENT_ERROR.setComment( + "openlookeng client is abnormal due to some circumstances(openlookeng client由于某些情况异常)"); + assertEquals( + "openlookeng client is abnormal due to some circumstances(openlookeng client由于某些情况异常)", + OPENLOOKENG_CLIENT_ERROR.getComment()); + + OPENLOOKENG_STATUS_ERROR.setComment("test"); + assertEquals("test", OPENLOOKENG_STATUS_ERROR.getComment()); + OPENLOOKENG_STATUS_ERROR.setComment( + "The status of openlookeng is abnormal, and the query statement cannot be executed and ended(openlookeng状态出现异常,查询语句无法执行结束)"); + assertEquals( + "The status of openlookeng is abnormal, and the query statement cannot be executed and ended(openlookeng状态出现异常,查询语句无法执行结束)", + OPENLOOKENG_STATUS_ERROR.getComment()); + } + + @Test + void testGetModule() { + assertEquals("jdbcEngineConnExecutor", OPENLOOKENG_CLIENT_ERROR.getModule()); + assertEquals("jdbcEngineConnExecutor", 
OPENLOOKENG_STATUS_ERROR.getModule()); + } + + @Test + void testSetModule() { + OPENLOOKENG_CLIENT_ERROR.setModule("test"); + assertEquals("test", OPENLOOKENG_CLIENT_ERROR.getModule()); + OPENLOOKENG_CLIENT_ERROR.setModule("jdbcEngineConnExecutor"); + assertEquals("jdbcEngineConnExecutor", OPENLOOKENG_CLIENT_ERROR.getModule()); + + OPENLOOKENG_STATUS_ERROR.setModule("test"); + assertEquals("test", OPENLOOKENG_STATUS_ERROR.getModule()); + OPENLOOKENG_STATUS_ERROR.setModule("jdbcEngineConnExecutor"); + assertEquals("jdbcEngineConnExecutor", OPENLOOKENG_STATUS_ERROR.getModule()); + } +} diff --git a/linkis-engineconn-plugins/pipeline/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/pipeline/src/main/assembly/distribution.xml index e48771a5b09..85a477c94d9 100644 --- a/linkis-engineconn-plugins/pipeline/src/main/assembly/distribution.xml +++ b/linkis-engineconn-plugins/pipeline/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - + + linkis-enginePlugin-pipeline dir diff --git a/linkis-engineconn-plugins/presto/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/presto/src/main/assembly/distribution.xml index 76f3e9cf367..a1b49206aba 100644 --- a/linkis-engineconn-plugins/presto/src/main/assembly/distribution.xml +++ b/linkis-engineconn-plugins/presto/src/main/assembly/distribution.xml @@ -16,10 +16,8 @@ ~ limitations under the License. --> - + linkis-engineplugin-presto dir diff --git a/linkis-engineconn-plugins/python/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/python/src/main/assembly/distribution.xml index d2a030e574f..83438871a85 100644 --- a/linkis-engineconn-plugins/python/src/main/assembly/distribution.xml +++ b/linkis-engineconn-plugins/python/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - + + linkis-engineplugin-python dir diff --git a/linkis-engineconn-plugins/shell/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/shell/src/main/assembly/distribution.xml index 40a7db106ef..51478862b11 100755 --- a/linkis-engineconn-plugins/shell/src/main/assembly/distribution.xml +++ b/linkis-engineconn-plugins/shell/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - + + linkis-enginePlugin-shell dir diff --git a/linkis-engineconn-plugins/spark/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/spark/src/main/assembly/distribution.xml index 824af8d7258..b1de6a751cc 100644 --- a/linkis-engineconn-plugins/spark/src/main/assembly/distribution.xml +++ b/linkis-engineconn-plugins/spark/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
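Both new error-code enums (Hive and openLooKeng) funnel their constructors through ErrorCodeUtils.validateErrorCode, so a code outside the engineconn-plugins band 26000-29999 fails as soon as the enum class loads rather than at throw time. A small illustrative sketch; the object name and the standalone call are not from the patch:

import org.apache.linkis.common.errorcode.ErrorCodeUtils

object ErrorCodeRangeSketch {
  def main(args: Array[String]): Unit = {
    // 26030 (OPENLOOKENG_CLIENT_ERROR) sits inside the allowed band, so
    // this guard passes; the old ad hoc codes 60001/60002 fall outside it.
    ErrorCodeUtils.validateErrorCode(26030, 26000, 29999)
  }
}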
--> - - + + linkis-enginePlugin-spark dir diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala index 4479992eaca..75a380a0786 100644 --- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala +++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala @@ -37,6 +37,7 @@ import org.apache.linkis.storage.resultset.ResultSetWriter import org.apache.commons.exec.CommandLine import org.apache.commons.io.IOUtils import org.apache.commons.lang3.{RandomStringUtils, StringUtils} +import org.apache.spark.SparkConf import org.apache.spark.api.java.JavaSparkContext import org.apache.spark.sql.{DataFrame, SparkSession} import org.apache.spark.sql.execution.datasources.csv.UDF @@ -89,9 +90,9 @@ class SparkPythonExecutor(val sparkEngineSession: SparkEngineSession, val id: In } } - def getSparkConf: Unit = sc.getConf + def getSparkConf: SparkConf = sc.getConf - def getJavaSparkContext: Unit = new JavaSparkContext(sc) + def getJavaSparkContext: JavaSparkContext = new JavaSparkContext(sc) def getSparkSession: Object = if (sparkSession != null) sparkSession else () => throw new IllegalAccessException("not supported keyword spark in spark1.x versions") diff --git a/linkis-engineconn-plugins/sqoop/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/sqoop/src/main/assembly/distribution.xml index 52264659d31..3c711f4a4d9 100644 --- a/linkis-engineconn-plugins/sqoop/src/main/assembly/distribution.xml +++ b/linkis-engineconn-plugins/sqoop/src/main/assembly/distribution.xml @@ -17,10 +17,8 @@ */ --> - + sqoop dir diff --git a/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java b/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java index d57d9bd3a6c..b86ed3821c0 100644 --- a/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java +++ b/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java @@ -51,6 +51,9 @@ import com.cloudera.sqoop.tool.SqoopTool; import com.cloudera.sqoop.util.OptionsFileUtil; +import static org.apache.linkis.engineconnplugin.sqoop.client.errorcode.SqoopErrorCodeSummary.ERROR_IN_CLOSING; +import static org.apache.linkis.engineconnplugin.sqoop.client.errorcode.SqoopErrorCodeSummary.UNABLE_TO_CLOSE; + /** * Main entry-point for Sqoop Usage: hadoop jar (this_jar_name) com.cloudera.sqoop.Sqoop (options) * See the SqoopOptions class for options. 
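The SparkPythonExecutor hunk above fixes two accessors whose explicit `: Unit` return type made Scala evaluate the body and then discard the result, so callers received () instead of the SparkConf or JavaSparkContext. A self-contained sketch of the before and after shapes; the class name and constructor are illustrative:

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.api.java.JavaSparkContext

class ReturnTypeSketch(sc: SparkContext) {
  // Pre-patch shape: the SparkConf is computed, then silently dropped.
  def getSparkConfBroken: Unit = sc.getConf
  // Post-patch shape: the declared type lets the value reach the caller.
  def getSparkConf: SparkConf = sc.getConf
  def getJavaSparkContext: JavaSparkContext = new JavaSparkContext(sc)
}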
@@ -319,11 +322,10 @@ public static void close() throws JobClosableException { } catch (Exception e) { // Ignore } - throw new JobClosableException( - "Unable to close the mapReduce job related to cluster [" + cluster + "]", se); + throw new JobClosableException(UNABLE_TO_CLOSE.getErrorDesc() + "[" + cluster + "]", se); } } catch (IOException | SQLException e) { - throw new JobClosableException("Error in closing sqoop client", e); + throw new JobClosableException(ERROR_IN_CLOSING.getErrorDesc(), e); } } diff --git a/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/errorcode/SqoopErrorCodeSummary.java b/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/errorcode/SqoopErrorCodeSummary.java new file mode 100644 index 00000000000..6d44d5be64d --- /dev/null +++ b/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/errorcode/SqoopErrorCodeSummary.java @@ -0,0 +1,83 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
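A minimal sketch of the message pattern Sqoop.close() now follows: the enum supplies the stable bilingual description and only the runtime detail (the cluster) is appended. The wrapper object is illustrative; the constructor is the (message, cause) one kept in JobClosableException below:

import org.apache.linkis.engineconnplugin.sqoop.client.errorcode.SqoopErrorCodeSummary.UNABLE_TO_CLOSE
import org.apache.linkis.engineconnplugin.sqoop.client.exception.JobClosableException

object SqoopCloseSketch {
  // Mirrors the Sqoop.close() hunk above; cluster is whatever runtime
  // identifier the caller has at hand.
  def failClose(cluster: String, cause: Throwable): Nothing =
    throw new JobClosableException(UNABLE_TO_CLOSE.getErrorDesc + "[" + cluster + "]", cause)
}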
+ */ + +package org.apache.linkis.engineconnplugin.sqoop.client.errorcode; + +public enum SqoopErrorCodeSummary { + ERROR_IN_CLOSING_ID(16025, "", ""), + UNABLE_TO_CLOSE( + 16025, + "Unable to close the mapReduce job related to cluster(无法关闭与集群相关的 mapReduce 作业)", + "Unable to close the mapReduce job related to cluster(无法关闭与集群相关的 mapReduce 作业)"), + ERROR_IN_CLOSING( + 16025, + "Error in closing sqoop client(关闭 sqoop 客户端时出错)", + "Error in closing sqoop client(关闭 sqoop 客户端时出错)"), + NOT_SUPPORT_METHON_ID(16023, "", ""), + NOT_SUPPORT_METHON( + 16023, + "Not support method for requestExpectedResource.(不支持 requestExpectedResource 的方法)", + "Not support method for requestExpectedResource.(不支持 requestExpectedResource 的方法)"), + EXEC_SQOOP_CODE_ERROR( + 16023, "Exec Sqoop Code Error(执行 Sqoop 代码错误)", "Exec Sqoop Code Error(执行 Sqoop 代码错误)"), + NEW_A_INSTANCE_OF( + 16023, + "New a instance of {} failed!(新建 {} 实例失败!)", + "New a instance of {} failed!(新建 {} 实例失败!)"); + + /** Error code(错误码) */ + private int errorCode; + /** Error description(错误描述) */ + private String errorDesc; + /** Possible reasons for the error(错误可能出现的原因) */ + private String comment; + + SqoopErrorCodeSummary(int errorCode, String errorDesc, String comment) { + this.errorCode = errorCode; + this.errorDesc = errorDesc; + this.comment = comment; + } + + public int getErrorCode() { + return errorCode; + } + + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + @Override + public String toString() { + return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc; + } +} diff --git a/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java b/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java index 92bc0a0239f..49fa23d7fca 100644 --- a/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java +++ b/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java @@ -19,18 +19,18 @@ import org.apache.linkis.common.exception.ErrorException; +import static org.apache.linkis.engineconnplugin.sqoop.client.errorcode.SqoopErrorCodeSummary.ERROR_IN_CLOSING_ID; + /** Exception in closing/destroying the job */ public class JobClosableException extends ErrorException { private static final long serialVersionUID = 1L; - public static final int ERROR_CODE = 16025; - public JobClosableException(String message) { - super(ERROR_CODE, message); + super(ERROR_IN_CLOSING_ID.getErrorCode(), message); } public JobClosableException(String message, Throwable e) { - super(ERROR_CODE, message); + super(ERROR_IN_CLOSING_ID.getErrorCode(), message); this.initCause(e); } } diff --git a/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java b/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java index 3f662b5cc8a..870e46f2c7b 100644 ---
a/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java +++ b/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java @@ -19,17 +19,17 @@ import org.apache.linkis.common.exception.ErrorException; +import static org.apache.linkis.engineconnplugin.sqoop.client.errorcode.SqoopErrorCodeSummary.NOT_SUPPORT_METHON_ID; + public class JobExecutionException extends ErrorException { private static final long serialVersionUID = 1L; - public static final int ERROR_CODE = 16023; - public JobExecutionException(String message) { - super(ERROR_CODE, message); + super(NOT_SUPPORT_METHON_ID.getErrorCode(), message); } public JobExecutionException(String message, Throwable e) { - super(ERROR_CODE, message); + super(NOT_SUPPORT_METHON_ID.getErrorCode(), message); this.initCause(e); } } diff --git a/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala index ba82fd2c4ff..8756b742915 100644 --- a/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala +++ b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala @@ -19,6 +19,7 @@ package org.apache.linkis.engineconnplugin.sqoop.executor import org.apache.linkis.engineconn.executor.entity.{LabelExecutor, ResourceExecutor, YarnExecutor} import org.apache.linkis.engineconnplugin.sqoop.client.Sqoop +import org.apache.linkis.engineconnplugin.sqoop.client.errorcode.SqoopErrorCodeSummary.NOT_SUPPORT_METHON import org.apache.linkis.engineconnplugin.sqoop.client.exception.JobExecutionException import org.apache.linkis.engineconnplugin.sqoop.context.SqoopEngineConnContext import org.apache.linkis.engineconnplugin.sqoop.context.SqoopResourceConfiguration.LINKIS_QUEUE_NAME @@ -44,7 +45,7 @@ trait SqoopExecutor extends YarnExecutor with LabelExecutor with ResourceExecuto override def setExecutorLabels(labels: util.List[Label[_]]): Unit = this.executorLabels = labels override def requestExpectedResource(expectedResource: NodeResource): NodeResource = - throw new JobExecutionException("Not support method for requestExpectedResource.") + throw new JobExecutionException(NOT_SUPPORT_METHON.getErrorDesc) protected val sqoopEngineConnContext: SqoopEngineConnContext } diff --git a/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala index 9dd79654088..9b09d6b6130 100644 --- a/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala +++ b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala @@ -25,6 +25,7 @@ import org.apache.linkis.engineconn.once.executor.{ OperableOnceExecutor } import org.apache.linkis.engineconnplugin.sqoop.client.LinkisSqoopClient +import org.apache.linkis.engineconnplugin.sqoop.client.errorcode.SqoopErrorCodeSummary.EXEC_SQOOP_CODE_ERROR import org.apache.linkis.engineconnplugin.sqoop.client.exception.JobExecutionException import org.apache.linkis.engineconnplugin.sqoop.context.{ 
SqoopEngineConnContext, @@ -73,7 +74,7 @@ class SqoopOnceCodeExecutor( setResponse( ErrorExecuteResponse( "Run code failed!", - new JobExecutionException("Exec Sqoop Code Error") + new JobExecutionException(EXEC_SQOOP_CODE_ERROR.getErrorDesc) ) ) } diff --git a/linkis-engineconn-plugins/trino/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/trino/src/main/assembly/distribution.xml index b97db678380..3fa53ccb15b 100644 --- a/linkis-engineconn-plugins/trino/src/main/assembly/distribution.xml +++ b/linkis-engineconn-plugins/trino/src/main/assembly/distribution.xml @@ -16,10 +16,8 @@ ~ limitations under the License. --> - + linkis-manager-enginePlugin-trino dir diff --git a/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/conf/TrinoConfiguration.scala b/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/conf/TrinoConfiguration.scala index 42ac6e4cb40..e5c6f5186a8 100644 --- a/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/conf/TrinoConfiguration.scala +++ b/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/conf/TrinoConfiguration.scala @@ -17,48 +17,48 @@ package org.apache.linkis.engineplugin.trino.conf -import org.apache.linkis.common.conf.{ByteType, CommonVars} +import org.apache.linkis.common.conf.CommonVars import org.apache.linkis.storage.utils.StorageConfiguration import java.lang object TrinoConfiguration { - val ENGINE_CONCURRENT_LIMIT = CommonVars[Int]("wds.linkis.engineconn.concurrent.limit", 100) + val ENGINE_CONCURRENT_LIMIT = CommonVars[Int]("linkis.engineconn.concurrent.limit", 100) - val DEFAULT_LIMIT = CommonVars[Int]("wds.linkis.trino.default.limit", 5000) + val DEFAULT_LIMIT = CommonVars[Int]("linkis.trino.default.limit", 5000) val TRINO_HTTP_CONNECT_TIME_OUT = - CommonVars[java.lang.Long]("wds.linkis.trino.http.connectTimeout.seconds", new lang.Long(60)) + CommonVars[java.lang.Long]("linkis.trino.http.connectTimeout.seconds", new lang.Long(60)) val TRINO_HTTP_READ_TIME_OUT = - CommonVars[java.lang.Long]("wds.linkis.trino.http.readTimeout.seconds", new lang.Long(60)) + CommonVars[java.lang.Long]("linkis.trino.http.readTimeout.seconds", new lang.Long(60)) - val TRINO_URL = CommonVars[String]("wds.linkis.trino.url", "http://127.0.0.1:8080") + val TRINO_URL = CommonVars[String]("linkis.trino.url", "http://127.0.0.1:8080") - val TRINO_PASSWORD = CommonVars[String]("wds.linkis.trino.password", null) - val TRINO_PASSWORD_CMD = CommonVars[String]("wds.linkis.trino.password.cmd", null) - val TRINO_CATALOG = CommonVars[String]("wds.linkis.trino.catalog", "system") - val TRINO_SCHEMA = CommonVars[String]("wds.linkis.trino.schema", "") - val TRINO_SOURCE = CommonVars[String]("wds.linkis.trino.source", "global") + val TRINO_PASSWORD = CommonVars[String]("linkis.trino.password", null) + val TRINO_PASSWORD_CMD = CommonVars[String]("linkis.trino.password.cmd", null) + val TRINO_CATALOG = CommonVars[String]("linkis.trino.catalog", "system") + val TRINO_SCHEMA = CommonVars[String]("linkis.trino.schema", "") + val TRINO_SOURCE = CommonVars[String]("linkis.trino.source", "global") - val TRINO_SSL_INSECURED = CommonVars[Boolean]("wds.linkis.trino.ssl.insecured", true) - val TRINO_SSL_KEYSTORE = CommonVars[String]("wds.linkis.trino.ssl.keystore", null) - val TRINO_SSL_KEYSTORE_TYPE = CommonVars[String]("wds.linkis.trino.ssl.keystore.type", null) + val TRINO_SSL_INSECURED = CommonVars[Boolean]("linkis.trino.ssl.insecured", true) + val 
TRINO_SSL_KEYSTORE = CommonVars[String]("linkis.trino.ssl.keystore", null) + val TRINO_SSL_KEYSTORE_TYPE = CommonVars[String]("linkis.trino.ssl.keystore.type", null) val TRINO_SSL_KEYSTORE_PASSWORD = - CommonVars[String]("wds.linkis.trino.ssl.keystore.password", null) + CommonVars[String]("linkis.trino.ssl.keystore.password", null) - val TRINO_SSL_TRUSTSTORE = CommonVars[String]("wds.linkis.trino.ssl.truststore", null) - val TRINO_SSL_TRUSTSTORE_TYPE = CommonVars[String]("wds.linkis.trino.ssl.truststore.type", null) + val TRINO_SSL_TRUSTSTORE = CommonVars[String]("linkis.trino.ssl.truststore", null) + val TRINO_SSL_TRUSTSTORE_TYPE = CommonVars[String]("linkis.trino.ssl.truststore.type", null) val TRINO_SSL_TRUSTSTORE_PASSWORD = - CommonVars[String]("wds.linkis.trino.ssl.truststore.password", null) + CommonVars[String]("linkis.trino.ssl.truststore.password", null) - val TRINO_FORBID_GRANT = CommonVars[Boolean]("wds.linkis.trino.forbid.grant", true) + val TRINO_FORBID_GRANT = CommonVars[Boolean]("linkis.trino.forbid.grant", true) val TRINO_FORBID_MODIFY_SCHEMA = - CommonVars[Boolean]("wds.linkis.trino.forbid.modifySchema", true) + CommonVars[Boolean]("linkis.trino.forbid.modifySchema", true) val TRINO_USER_ISOLATION_MODE = CommonVars[Boolean]("linkis.trino.user.isolation.mode", false) diff --git a/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/executor/TrinoEngineConnExecutor.scala b/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/executor/TrinoEngineConnExecutor.scala index f9ecd2f0db4..a1089ec62a8 100644 --- a/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/executor/TrinoEngineConnExecutor.scala +++ b/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/executor/TrinoEngineConnExecutor.scala @@ -20,7 +20,6 @@ package org.apache.linkis.engineplugin.trino.executor import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{OverloadUtils, Utils} import org.apache.linkis.engineconn.common.conf.{EngineConnConf, EngineConnConstant} -import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask import org.apache.linkis.engineconn.computation.executor.execute.{ ConcurrentComputationExecutor, EngineExecutionContext @@ -70,7 +69,7 @@ import javax.security.auth.callback.PasswordCallback import java.net.URI import java.util import java.util._ -import java.util.concurrent.{ConcurrentHashMap, TimeUnit} +import java.util.concurrent.{Callable, ConcurrentHashMap, TimeUnit} import java.util.function.Supplier import scala.collection.JavaConverters._ @@ -144,26 +143,6 @@ class TrinoEngineConnExecutor(override val outputPrintLimit: Int, val id: Int) super.init } - override def execute(engineConnTask: EngineConnTask): ExecuteResponse = { - val user = getCurrentUser(engineConnTask.getLables) - val userCreatorLabel = engineConnTask.getLables.find(_.isInstanceOf[UserCreatorLabel]).get - val engineTypeLabel = engineConnTask.getLables.find(_.isInstanceOf[EngineTypeLabel]).get - var configMap: util.Map[String, String] = null - if (userCreatorLabel != null && engineTypeLabel != null) { - configMap = TrinoEngineConfig.getCacheMap( - ( - userCreatorLabel.asInstanceOf[UserCreatorLabel], - engineTypeLabel.asInstanceOf[EngineTypeLabel] - ) - ) - } - clientSessionCache.put( - engineConnTask.getTaskId, - getClientSession(user, engineConnTask.getProperties, configMap) - ) - super.execute(engineConnTask) - } - override def executeLine( 
engineExecutorContext: EngineExecutionContext, code: String @@ -180,13 +159,36 @@ class TrinoEngineConnExecutor(override val outputPrintLimit: Int, val id: Int) TrinoCode.checkCode(realCode) logger.info(s"trino client begins to run psql code:\n $realCode") + val currentUser = getCurrentUser(engineExecutorContext.getLabels) val trinoUser = Optional .ofNullable(TRINO_DEFAULT_USER.getValue) .orElseGet(new Supplier[String] { - override def get(): String = getCurrentUser(engineExecutorContext.getLabels) + override def get(): String = currentUser }) val taskId = engineExecutorContext.getJobId.get - val clientSession = clientSessionCache.getIfPresent(taskId) + val clientSession = clientSessionCache.get( + taskId, + new Callable[ClientSession] { + override def call(): ClientSession = { + val userCreatorLabel = + engineExecutorContext.getLabels.find(_.isInstanceOf[UserCreatorLabel]).get + val engineTypeLabel = + engineExecutorContext.getLabels.find(_.isInstanceOf[EngineTypeLabel]).get + var configMap: util.Map[String, String] = null + if (userCreatorLabel != null && engineTypeLabel != null) { + configMap = Utils.tryAndError( + TrinoEngineConfig.getCacheMap( + ( + userCreatorLabel.asInstanceOf[UserCreatorLabel], + engineTypeLabel.asInstanceOf[EngineTypeLabel] + ) + ) + ) + } + getClientSession(currentUser, engineExecutorContext.getProperties, configMap) + } + } + ) val statement = StatementClientFactory.newStatementClient( okHttpClientCache.computeIfAbsent(trinoUser, buildOkHttpClient), clientSession, diff --git a/linkis-engineconn-plugins/trino/src/test/scala/org/apache/linkis/engineplugin/trino/executer/TestTrinoEngineConnExecutor.scala b/linkis-engineconn-plugins/trino/src/test/scala/org/apache/linkis/engineplugin/trino/executer/TestTrinoEngineConnExecutor.scala new file mode 100644 index 00000000000..d8d2d66be5a --- /dev/null +++ b/linkis-engineconn-plugins/trino/src/test/scala/org/apache/linkis/engineplugin/trino/executer/TestTrinoEngineConnExecutor.scala @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
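
The executeLine rewrite above replaces clientSessionCache.getIfPresent(taskId) with Guava's Cache.get(key, Callable), building and caching the ClientSession on first access instead of risking a null on a cache miss. A minimal sketch of that idiom, assuming Guava on the classpath; SessionInfo is a placeholder for the real ClientSession wiring:

```java
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

import java.util.concurrent.ExecutionException;

public class SessionCacheDemo {

  // Placeholder for the real Trino ClientSession.
  static class SessionInfo {
    final String user;

    SessionInfo(String user) { this.user = user; }

    @Override
    public String toString() { return "SessionInfo(" + user + ")"; }
  }

  private final Cache<String, SessionInfo> sessions =
      CacheBuilder.newBuilder().maximumSize(100).build();

  // Cache.get(key, loader) computes and caches the value on a miss, so the
  // caller can never observe the null that getIfPresent allowed.
  SessionInfo sessionFor(String taskId, String user) throws ExecutionException {
    return sessions.get(taskId, () -> new SessionInfo(user));
  }

  public static void main(String[] args) throws ExecutionException {
    SessionCacheDemo demo = new SessionCacheDemo();
    System.out.println(demo.sessionFor("task-1", "hadoop")); // built on first access
    System.out.println(demo.sessionFor("task-1", "hadoop")); // served from the cache
  }
}
```

This also compensates for the removed execute(engineConnTask) override, which previously had to pre-populate the cache before execution.
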
+ */ + +package org.apache.linkis.engineplugin.trino.executer + +import org.apache.linkis.common.ServiceInstance +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.utils.Utils +import org.apache.linkis.engineconn.common.creation.{ + DefaultEngineCreationContext, + EngineCreationContext +} +import org.apache.linkis.engineconn.computation.executor.entity.CommonEngineConnTask +import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext +import org.apache.linkis.engineconn.computation.executor.utlis.ComputationEngineConstant +import org.apache.linkis.engineplugin.trino.executor.TrinoEngineConnExecutor +import org.apache.linkis.engineplugin.trino.factory.TrinoEngineConnFactory +import org.apache.linkis.governance.common.conf.GovernanceCommonConf +import org.apache.linkis.governance.common.entity.ExecutionNodeStatus +import org.apache.linkis.governance.common.utils.EngineConnArgumentsParser +import org.apache.linkis.manager.engineplugin.common.launch.process.Environment +import org.apache.linkis.manager.label.builder.factory.{ + LabelBuilderFactory, + LabelBuilderFactoryContext +} +import org.apache.linkis.manager.label.entity.Label + +import java.util + +import scala.collection.JavaConversions._ +import scala.collection.mutable.ArrayBuffer + +import org.junit.jupiter.api.{Assertions, Test} + +class TestTrinoEngineConnExecutor { + + private val engineCreationContext: EngineCreationContext = new DefaultEngineCreationContext + + private val labelBuilderFactory: LabelBuilderFactory = + LabelBuilderFactoryContext.getLabelBuilderFactory + +// @Test + def testExecuteLine: Unit = { + val engineconnConf = "--engineconn-conf" + val springConf = "--spring-conf" + val array = Array( + engineconnConf, + "wds.linkis.rm.instance=10", + engineconnConf, + "label.userCreator=root-IDE", + engineconnConf, + "ticketId=037ab855-0c41-4323-970d-7f75e71883b6", + engineconnConf, + "label.engineType=trino", + engineconnConf, + "linkis.trino.url=https://trino.dev.com/hive/hivetest", + engineconnConf, + "linkis.trino.ssl.insecured=true", + engineconnConf, + "linkis.trino.default.start.user=root", + engineconnConf, + "linkis.trino.password=123456", + springConf, + "eureka.client.serviceUrl.defaultZone=http://127.0.0.1:8761/eureka/", + springConf, + "logging.config=classpath:log4j2.xml", + springConf, + "spring.profiles.active=engineconn", + springConf, + "server.port=35655", + springConf, + "spring.application.name=linkis-cg-engineconn" + ) + this.init(array) + val cmd = "SHOW SCHEMAS" + val taskId = "1" + val task = new CommonEngineConnTask(taskId, false) + val properties = new util.HashMap[String, Object] + task.setProperties(properties) + task.data(ComputationEngineConstant.LOCK_TYPE_NAME, "lock") + task.setStatus(ExecutionNodeStatus.Scheduled) + val engineFactory: TrinoEngineConnFactory = new TrinoEngineConnFactory + val engine = engineFactory.createEngineConn(engineCreationContext) + + val jdbcExecutor: TrinoEngineConnExecutor = engineFactory + .newExecutor(1, engineCreationContext, engine) + .asInstanceOf[TrinoEngineConnExecutor] + val engineExecutionContext = new EngineExecutionContext(jdbcExecutor, Utils.getJvmUser) + engineExecutionContext.setJobId(taskId) + val anyArray = engineCreationContext.getLabels().toArray() + engineExecutionContext.setLabels(anyArray.map(_.asInstanceOf[Label[_]])) + val testPath = this.getClass.getClassLoader.getResource("").getPath + engineExecutionContext.setStorePath(testPath) + engineCreationContext.getOptions.foreach({ case 
(key, value) => + engineExecutionContext.addProperty(key, value) + }) + Assertions.assertNotNull(jdbcExecutor.getProgressInfo(taskId)) + val response = jdbcExecutor.executeLine(engineExecutionContext, cmd) + Assertions.assertNotNull(response) + } + + private def init(args: Array[String]): Unit = { + val arguments = EngineConnArgumentsParser.getEngineConnArgumentsParser.parseToObj(args) + val engineConf = arguments.getEngineConnConfMap + this.engineCreationContext.setUser(engineConf.getOrElse("user", Utils.getJvmUser)) + this.engineCreationContext.setTicketId(engineConf.getOrElse("ticketId", "")) + val host = CommonVars(Environment.ECM_HOST.toString, "127.0.0.1").getValue + val port = CommonVars(Environment.ECM_PORT.toString, "80").getValue + this.engineCreationContext.setEMInstance( + ServiceInstance(GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME.getValue, s"$host:$port") + ) + val labels = new ArrayBuffer[Label[_]] + val labelArgs = engineConf.filter(_._1.startsWith(EngineConnArgumentsParser.LABEL_PREFIX)) + if (labelArgs.nonEmpty) { + labelArgs.foreach { case (key, value) => + labels += labelBuilderFactory + .createLabel[Label[_]](key.replace(EngineConnArgumentsParser.LABEL_PREFIX, ""), value) + } + engineCreationContext.setLabels(labels.toList) + } + val jMap = new java.util.HashMap[String, String](engineConf.size) + jMap.putAll(engineConf) + this.engineCreationContext.setOptions(jMap) + this.engineCreationContext.setArgs(args) + sys.props.putAll(jMap) + } + +}
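
The test above boots an engine-conn context from alternating --engineconn-conf and --spring-conf flag/value pairs. A stripped-down sketch of the flag parsing that the test's init() delegates to EngineConnArgumentsParser; parseEngineConnConf here is a hypothetical helper written only to illustrate the shape of that work:

```java
import java.util.HashMap;
import java.util.Map;

public class EngineConnArgsDemo {

  // Collects the value following each "--engineconn-conf" flag into a
  // key=value map, mirroring the arguments array built in the test.
  static Map<String, String> parseEngineConnConf(String[] args) {
    Map<String, String> conf = new HashMap<>();
    for (int i = 0; i + 1 < args.length; i++) {
      if ("--engineconn-conf".equals(args[i])) {
        String[] kv = args[i + 1].split("=", 2);
        if (kv.length == 2) {
          conf.put(kv[0], kv[1]);
        }
      }
    }
    return conf;
  }

  public static void main(String[] args) {
    String[] demo = {
      "--engineconn-conf", "label.engineType=trino",
      "--engineconn-conf", "linkis.trino.url=http://127.0.0.1:8080",
      "--spring-conf", "server.port=35655" // ignored by this helper
    };
    // e.g. {label.engineType=trino, linkis.trino.url=http://127.0.0.1:8080}
    System.out.println(parseEngineConnConf(demo));
  }
}
```

Keys prefixed with label. are then turned into Label instances by the label builder factory, as the init() method above shows.
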
diff --git a/linkis-extensions/linkis-io-file-client/src/main/scala/org/apache/linkis/storage/io/iteraceptor/IOMethodInterceptor.scala b/linkis-extensions/linkis-io-file-client/src/main/scala/org/apache/linkis/storage/io/iteraceptor/IOMethodInterceptor.scala index b081aded988..92feb8a5616 100644 --- a/linkis-extensions/linkis-io-file-client/src/main/scala/org/apache/linkis/storage/io/iteraceptor/IOMethodInterceptor.scala +++ b/linkis-extensions/linkis-io-file-client/src/main/scala/org/apache/linkis/storage/io/iteraceptor/IOMethodInterceptor.scala @@ -22,6 +22,7 @@ import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.manager.label.constant.LabelKeyConstant import org.apache.linkis.manager.label.entity.entrance.BindEngineLabel import org.apache.linkis.storage.domain.{FsPathListWithError, MethodEntity, MethodEntitySerializer} +import org.apache.linkis.storage.errorcode.LinkisIoFileClientErrorCodeSummary._ import org.apache.linkis.storage.exception.{FSNotInitException, StorageErrorException} import org.apache.linkis.storage.io.client.IOClient import org.apache.linkis.storage.io.utils.IOClientUtils @@ -36,7 +37,6 @@ import org.springframework.cglib.proxy.{MethodInterceptor, MethodProxy} import java.io.{InputStream, IOException, OutputStream} import java.lang.reflect.Method import java.net.InetAddress -import java.util import scala.beans.BeanProperty import scala.collection.JavaConverters._ @@ -48,7 +48,7 @@ class IOMethodInterceptor(fsType: String) extends MethodInterceptor with Logging @BeanProperty var ioClient: IOClient = _ - private val properties: java.util.Map[String, String] = new util.HashMap[String, String] + private val properties: mutable.HashMap[String, String] = mutable.HashMap[String, String]() private var inited = false @@ -69,7 +69,7 @@ label.setJobGroupId(IOClientUtils.generateJobGrupID()) } - def getProxyUser: String = StorageConfiguration.PROXY_USER.getValue(properties) + def getProxyUser: String = StorageConfiguration.PROXY_USER.getValue(properties.asJava) def getCreatorUser: String = StorageUtils.getJvmUser @@ -103,11 +103,8 @@ } def initFS(methodName: String = "init"): Unit = { - if (!properties.asScala.contains(StorageConfiguration.PROXY_USER.key)) { - throw new StorageErrorException( - 52002, - "no user set, we cannot get the permission information." - ) + if (!properties.contains(StorageConfiguration.PROXY_USER.key)) { + throw new StorageErrorException(NO_PROXY_USER.getErrorCode, NO_PROXY_USER.getErrorDesc) } bindEngineLabel.setIsJobGroupHead("true") bindEngineLabel.setIsJobGroupEnd("false") @@ -120,7 +117,7 @@ getProxyUser, getLocalIP, methodName, - Array(properties.asScala.toMap) + Array(properties.toMap) ), bindEngineLabel ) @@ -132,13 +129,18 @@ inited = true bindEngineLabel.setIsJobGroupEnd("false") bindEngineLabel.setIsJobGroupHead("false") - } else throw new StorageErrorException(52002, s"Failed to init FS for user:$getProxyUser ") + } else { + throw new StorageErrorException( + FAILED_TO_INIT_USER.getErrorCode, + s"Failed to init FS for user:$getProxyUser " + ) + } } def beforeOperation(): Unit = { if (closed) { throw new StorageErrorException( - 52002, + ENGINE_CLOSED_IO_ILLEGAL.getErrorCode, s"$fsType storage($id) engine($bindEngineLabel) has been closed, IO operation was illegal." ) } @@ -160,14 +162,17 @@ methodProxy: MethodProxy ): AnyRef = { if (closed && method.getName != "close") { - throw new StorageErrorException(52002, s"$fsType storage has been closed.") + throw new StorageErrorException( + STORAGE_HAS_BEEN_CLOSED.getErrorCode, + s"$fsType storage has been closed." + ) } if (System.currentTimeMillis() - lastAccessTime >= iOEngineExecutorMaxFreeTime) synchronized { method.getName match { case "init" => case "storageName" => return fsType case "setUser" => - properties.asScala += StorageConfiguration.PROXY_USER.key -> args(0).asInstanceOf[String]; + properties += StorageConfiguration.PROXY_USER.key -> args(0).asInstanceOf[String]; return Unit case _ => if (inited) { @@ -180,22 +185,23 @@ method.getName match { case "init" => val user = - if (properties.asScala.contains(StorageConfiguration.PROXY_USER.key)) { - StorageConfiguration.PROXY_USER.getValue(properties.asScala.toMap) - } else null + if (properties.contains(StorageConfiguration.PROXY_USER.key)) { + StorageConfiguration.PROXY_USER.getValue(properties.toMap) + } else { + null + } if (args.length > 0 && args(0).isInstanceOf[java.util.Map[String, String]]) { - properties.asScala ++= args(0).asInstanceOf[java.util.Map[String, String]].asScala + properties ++= args(0).asInstanceOf[java.util.Map[String, String]].asScala } - if (StringUtils.isNotEmpty(user)) { - properties.asScala += StorageConfiguration.PROXY_USER.key -> user + if (StringUtils.isNoneBlank(user)) { + properties += StorageConfiguration.PROXY_USER.key -> user } initFS() logger.warn(s"For user($user)inited a $fsType storage($id) .") Unit case "fsName" => fsType case "setUser" => - properties.asScala += StorageConfiguration.PROXY_USER.key -> args(0).asInstanceOf[String]; - Unit + properties += StorageConfiguration.PROXY_USER.key -> args(0).asInstanceOf[String]; Unit case "read" => if (!inited) throw new IllegalAccessException("storage has not been inited.") new IOInputStream(args)
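
IOMethodInterceptor above is a cglib MethodInterceptor: every call on the proxied Fs object lands in intercept, which dispatches on the method name and forwards the serialized call to the IOClient. A toy sketch of that interception mechanism, using the same repackaged Spring cglib classes the file imports; Greeter is an invented class for the demo:

```java
import org.springframework.cglib.proxy.Enhancer;
import org.springframework.cglib.proxy.MethodInterceptor;

public class InterceptDemo {

  // Invented target class; IOMethodInterceptor proxies Fs instead.
  public static class Greeter {
    public String greet(String name) { return "hello " + name; }
  }

  public static void main(String[] args) {
    Enhancer enhancer = new Enhancer();
    enhancer.setSuperclass(Greeter.class);
    // Every method call on the generated subclass lands here first, just as
    // every Fs call lands in IOMethodInterceptor.intercept above.
    enhancer.setCallback((MethodInterceptor) (obj, method, callArgs, proxy) -> {
      System.out.println("intercepted: " + method.getName());
      return proxy.invokeSuper(obj, callArgs); // delegate to the real method
    });
    Greeter proxied = (Greeter) enhancer.create();
    // prints "intercepted: greet" and then "hello linkis"
    System.out.println(proxied.greet("linkis"));
  }
}
```

The real interceptor never calls invokeSuper; instead of delegating locally it ships the method name and arguments to a remote IO engine, which is why unknown methods must be guarded by the inited and closed checks above.
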
diff --git a/linkis-orchestrator/linkis-code-orchestrator/pom.xml b/linkis-orchestrator/linkis-code-orchestrator/pom.xml index 0dcd74dd0b3..0a04582cdea 100644 --- a/linkis-orchestrator/linkis-code-orchestrator/pom.xml +++ b/linkis-orchestrator/linkis-code-orchestrator/pom.xml @@ -21,6 +21,7 @@ <groupId>org.apache.linkis</groupId> <artifactId>linkis</artifactId> <version>1.3.0</version> + <relativePath>../../pom.xml</relativePath> </parent> <artifactId>linkis-code-orchestrator</artifactId> diff --git a/linkis-orchestrator/linkis-orchestrator-core/pom.xml b/linkis-orchestrator/linkis-orchestrator-core/pom.xml index b7eef26e34b..0bba6b23c67 100644 --- a/linkis-orchestrator/linkis-orchestrator-core/pom.xml +++ b/linkis-orchestrator/linkis-orchestrator-core/pom.xml @@ -19,8 +19,9 @@ <modelVersion>4.0.0</modelVersion> <parent> <groupId>org.apache.linkis</groupId> - <artifactId>linkis-orchestrator</artifactId> + <artifactId>linkis</artifactId> <version>1.3.0</version> + <relativePath>../../pom.xml</relativePath> </parent> <artifactId>linkis-orchestrator-core</artifactId> diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/domain/JobReq.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/domain/JobReq.scala index be24baa3585..a2dbaa54fd1 100644 --- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/domain/JobReq.scala +++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/domain/JobReq.scala @@ -94,5 +94,5 @@ object AbstractJobReq { } object JobReq { - def getDefaultPriority: Unit = 0 + def getDefaultPriority: Int = 0 } diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/pom.xml b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/pom.xml index 77742fce8f9..6ba543f1e43 100644 --- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/pom.xml +++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/pom.xml @@ -19,9 +19,9 @@ <modelVersion>4.0.0</modelVersion> <parent> <groupId>org.apache.linkis</groupId> - <artifactId>linkis-orchestrator</artifactId> + <artifactId>linkis</artifactId> <version>1.3.0</version> - 
../../pom.xml + ../../../pom.xml linkis-orchestrator-ecm-plugin diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/org/apache/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/org/apache/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala index 50069b81307..c19692ed656 100644 --- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/org/apache/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala +++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/org/apache/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala @@ -233,7 +233,7 @@ class LoadBalanceLabelEngineConnManager extends ComputationEngineConnManager wit getMarkCache() .values() .asScala - .foreach(_.asScala.foreach(s => instances.asJava.add(s.getInstance))) + .foreach(_.asScala.foreach(s => instances.append(s.getInstance))) instances.toArray } diff --git a/linkis-public-enhancements/distribution.xml b/linkis-public-enhancements/distribution.xml index 534c63895b6..d824bbe6103 100644 --- a/linkis-public-enhancements/distribution.xml +++ b/linkis-public-enhancements/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - + + linkis-publicservice dir @@ -37,25 +35,6 @@ false false true - antlr:antlr:jar aopalliance:aopalliance:jar @@ -308,33 +287,6 @@ - - - ${basedir}/conf - - * - - 0777 - conf - unix - - - diff --git a/linkis-public-enhancements/linkis-basedata-manager/pom.xml b/linkis-public-enhancements/linkis-basedata-manager/pom.xml index c253ed1b94f..369e1192f15 100644 --- a/linkis-public-enhancements/linkis-basedata-manager/pom.xml +++ b/linkis-public-enhancements/linkis-basedata-manager/pom.xml @@ -22,6 +22,7 @@ org.apache.linkis linkis 1.3.0 + ../../pom.xml linkis-basedata-manager jar diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-basedata-manager/src/main/assembly/distribution.xml index 5e0d66a66d0..195bcc4f77c 100644 --- a/linkis-public-enhancements/linkis-basedata-manager/src/main/assembly/distribution.xml +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - - + + linkis-basedata-manager dir diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-client/pom.xml b/linkis-public-enhancements/linkis-bml/linkis-bml-client/pom.xml index 751afcfb284..b6bfd73115f 100644 --- a/linkis-public-enhancements/linkis-bml/linkis-bml-client/pom.xml +++ b/linkis-public-enhancements/linkis-bml/linkis-bml-client/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../../pom.xml linkis-bml-client diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/java/org/apache/linkis/bml/client/errorcode/BmlClientErrorCodeSummary.java b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/java/org/apache/linkis/bml/client/errorcode/BmlClientErrorCodeSummary.java new file mode 100644 index 00000000000..afb6d015205 --- /dev/null +++ b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/java/org/apache/linkis/bml/client/errorcode/BmlClientErrorCodeSummary.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.bml.client.errorcode; + +import org.apache.linkis.common.errorcode.ErrorCodeUtils; + +public enum BmlClientErrorCodeSummary { + /** + * 10000-10999 linkis-frame 11000-12999 linkis-commons 13000-14999 linkis-spring-cloud-services + * 15000-19999 linkis-public-enhancements 20000-24999 linkis-computation-governance 25000-25999 + * linkis-extensions 26000-29999 linkis-engineconn-plugins + */ + POST_REQUEST_RESULT_NOT_MATCH( + 20060, + "the result returned by the repository client POST request does not match(物料库客户端POST请求返回的result不匹配)", + "the result returned by the repository client POST request does not match(物料库客户端POST请求返回的result不匹配)", + "bmlClient"), + + BML_CLIENT_FAILED( + 20061, + "failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)", + "failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)", + "bmlClient"), + SERVER_URL_NOT_NULL( + 20062, + "serverUrl cannot be null(服务器URL不能为空)", + "serverUrl cannot be null(服务器URL不能为空)", + "bmlClient"); + + private int errorCode; + + private String errorDesc; + + private String comment; + + private String module; + + BmlClientErrorCodeSummary(int errorCode, String errorDesc, String comment, String module) { + ErrorCodeUtils.validateErrorCode(errorCode, 20000, 24999); + this.errorCode = errorCode; + this.errorDesc = errorDesc; + this.comment = comment; + this.module = module; + } + + public int getErrorCode() { + return errorCode; + } + + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public String getComment() { + return comment; + } + + public void 
setComment(String comment) { + this.comment = comment; + } + + public String getModule() { + return module; + } + + public void setModule(String module) { + this.module = module; + } + + @Override + public String toString() { + return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc; + } +} diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/org/apache/linkis/bml/client/impl/HttpBmlClient.scala b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/org/apache/linkis/bml/client/impl/HttpBmlClient.scala index f2d8bb3e2dd..e71ddb4644c 100644 --- a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/org/apache/linkis/bml/client/impl/HttpBmlClient.scala +++ b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/org/apache/linkis/bml/client/impl/HttpBmlClient.scala @@ -18,6 +18,11 @@ package org.apache.linkis.bml.client.impl import org.apache.linkis.bml.client.AbstractBmlClient +import org.apache.linkis.bml.client.errorcode.BmlClientErrorCodeSummary.{ + BML_CLIENT_FAILED, + POST_REQUEST_RESULT_NOT_MATCH, + SERVER_URL_NOT_NULL +} import org.apache.linkis.bml.common._ import org.apache.linkis.bml.conf.BmlConfiguration._ import org.apache.linkis.bml.http.HttpConf @@ -69,7 +74,12 @@ class HttpBmlClient( private def createClientConfig(): DWSClientConfig = { val _serverUrl = if (StringUtils.isEmpty(serverUrl)) HttpConf.gatewayInstance else serverUrl - if (StringUtils.isEmpty(_serverUrl)) throw BmlClientFailException("serverUrl cannot be null.") + if (StringUtils.isEmpty(_serverUrl)) { + throw BmlClientFailException( + SERVER_URL_NOT_NULL.getErrorCode, + SERVER_URL_NOT_NULL.getErrorDesc + ) + } val config = if (properties == null) { new util.HashMap[String, Object]() } else { @@ -198,9 +208,8 @@ class HttpBmlClient( "failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)", e ) - val exception = BmlClientFailException( - "failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)" - ) + val exception = + BmlClientFailException(BML_CLIENT_FAILED.getErrorCode, BML_CLIENT_FAILED.getErrorDesc) exception.initCause(e) throw exception case t: Throwable => @@ -246,9 +255,8 @@ class HttpBmlClient( "failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)", e ) - val exception = BmlClientFailException( - "failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)" - ) + val exception = + BmlClientFailException(BML_CLIENT_FAILED.getErrorCode, BML_CLIENT_FAILED.getErrorDesc) exception.initCause(e) throw e case t: Throwable => @@ -309,8 +317,15 @@ class HttpBmlClient( } case r: BmlResult => logger.error(s"result type ${r.getResultType} not match BmlResourceDownloadResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) + case _ => + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) } } @@ -354,8 +369,15 @@ class HttpBmlClient( } case r: BmlResult => logger.error(s"result type ${r.getResultType} not match BmlResourceDownloadResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) + case _ => + throw 
POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) } } @@ -412,8 +434,15 @@ class HttpBmlClient( } case r: BmlResult => logger.error(s"result type ${r.getResultType} not match BmlResourceDownloadResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) + case _ => + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) } } @@ -445,8 +474,15 @@ class HttpBmlClient( } case r: BmlResult => logger.error(s"result type ${r.getResultType} not match BmlResourceDownloadResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) + case _ => + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) } } @@ -480,8 +516,15 @@ class HttpBmlClient( } case r: BmlResult => logger.error(s"result type ${r.getResultType} not match BmlCreateBmlProjectResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) + case _ => + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) } } @@ -514,8 +557,15 @@ class HttpBmlClient( } case r: BmlResult => logger.error(s"result type ${r.getResultType} not match BmlResourceDownloadResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) + case _ => + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) } } @@ -576,8 +626,15 @@ class HttpBmlClient( } case r: BmlResult => logger.error(s"result type ${r.getResultType} not match BmlResourceDownloadResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) + case _ => + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) } } @@ -615,8 +672,15 @@ class HttpBmlClient( } case r: BmlResult => logger.error(s"result type ${r.getResultType} not match BmlCreateBmlProjectResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) + case _ => + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) } } @@ -645,8 +709,15 @@ class HttpBmlClient( } case r: BmlResult => logger.error(s"result type ${r.getResultType} not match BmlUpdateProjectResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + 
POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) + case _ => + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) } } @@ -690,8 +761,15 @@ class HttpBmlClient( } case r: BmlResult => logger.error(s"result type ${r.getResultType} not match BmlCopyResourceResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) + case _ => + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) } } @@ -720,8 +798,15 @@ class HttpBmlClient( } case r: BmlResult => logger.error(s"result type ${r.getResultType} not match BmlRollbackVersionResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) + case _ => + throw POSTResultNotMatchException( + POST_REQUEST_RESULT_NOT_MATCH.getErrorCode, + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc + ) } } diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/org/apache/linkis/bml/common/POSTActionFailException.scala b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/org/apache/linkis/bml/common/POSTActionFailException.scala index 874fe9f1913..0aaf68bbbad 100644 --- a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/org/apache/linkis/bml/common/POSTActionFailException.scala +++ b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/org/apache/linkis/bml/common/POSTActionFailException.scala @@ -19,29 +19,8 @@ package org.apache.linkis.bml.common import org.apache.linkis.common.exception.ErrorException -case class POSTActionFailException() - extends ErrorException(70025, "material house client request failed(物料库客户端请求失败)") {} +case class POSTResultNotMatchException(errorCode: Int, errorMsg: String) + extends ErrorException(errorCode, errorMsg) -case class POSTResultNotMatchException() - extends ErrorException( - 70021, - "The result returned by the repository client POST request does not match(物料库客户端POST请求返回的result不匹配)" - ) - -case class IllegalPathException() - extends ErrorException( - 70035, - "The catalog that was passed into the store does not exist or is illegal(传入物料库的目录不存在或非法)" - ) - -case class BmlResponseErrorException(errorMessage: String) - extends ErrorException(70038, errorMessage) - -case class GetResultNotMatchException() - extends ErrorException( - 70078, - "The result returned by the repository client GET request does not match(物料库客户端GET请求返回的result不匹配)" - ) - -case class BmlClientFailException(errorMsg: String) - extends ErrorException(70081, "An error occurred in the material client(物料库客户端出现错误)") +case class BmlClientFailException(errorCode: Int, errorMsg: String) + extends ErrorException(errorCode, errorMsg) diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/test/java/org/apache/linkis/bml/client/errorcode/BmlClientErrorCodeSummaryTest.java b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/test/java/org/apache/linkis/bml/client/errorcode/BmlClientErrorCodeSummaryTest.java new file mode 100644 index 00000000000..d524d27238e --- /dev/null +++ 
b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/test/java/org/apache/linkis/bml/client/errorcode/BmlClientErrorCodeSummaryTest.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.bml.client.errorcode; + +import org.junit.jupiter.api.Test; + +import static org.apache.linkis.bml.client.errorcode.BmlClientErrorCodeSummary.BML_CLIENT_FAILED; +import static org.apache.linkis.bml.client.errorcode.BmlClientErrorCodeSummary.POST_REQUEST_RESULT_NOT_MATCH; +import static org.apache.linkis.bml.client.errorcode.BmlClientErrorCodeSummary.SERVER_URL_NOT_NULL; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class BmlClientErrorCodeSummaryTest { + @Test + void testGetErrorCode() { + assertEquals(20060, POST_REQUEST_RESULT_NOT_MATCH.getErrorCode()); + assertEquals(20061, BML_CLIENT_FAILED.getErrorCode()); + assertEquals(20062, SERVER_URL_NOT_NULL.getErrorCode()); + } + + @Test + void testSetErrorCode() { + POST_REQUEST_RESULT_NOT_MATCH.setErrorCode(1); + assertEquals(1, POST_REQUEST_RESULT_NOT_MATCH.getErrorCode()); + POST_REQUEST_RESULT_NOT_MATCH.setErrorCode(20060); + assertEquals(20060, POST_REQUEST_RESULT_NOT_MATCH.getErrorCode()); + + BML_CLIENT_FAILED.setErrorCode(1); + assertEquals(1, BML_CLIENT_FAILED.getErrorCode()); + BML_CLIENT_FAILED.setErrorCode(20061); + assertEquals(20061, BML_CLIENT_FAILED.getErrorCode()); + + SERVER_URL_NOT_NULL.setErrorCode(1); + assertEquals(1, SERVER_URL_NOT_NULL.getErrorCode()); + SERVER_URL_NOT_NULL.setErrorCode(20062); + assertEquals(20062, SERVER_URL_NOT_NULL.getErrorCode()); + } + + @Test + void testGetErrorDesc() { + assertEquals( + "the result returned by the repository client POST request does not match(物料库客户端POST请求返回的result不匹配)", + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc()); + assertEquals( + "failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)", + BML_CLIENT_FAILED.getErrorDesc()); + assertEquals("serverUrl cannot be null(服务器URL不能为空)", SERVER_URL_NOT_NULL.getErrorDesc()); + } + + @Test + void testSetErrorDesc() { + POST_REQUEST_RESULT_NOT_MATCH.setErrorDesc("test"); + assertEquals("test", POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc()); + POST_REQUEST_RESULT_NOT_MATCH.setErrorDesc( + "the result returned by the repository client POST request does not match(物料库客户端POST请求返回的result不匹配)"); + assertEquals( + "the result returned by the repository client POST request does not match(物料库客户端POST请求返回的result不匹配)", + POST_REQUEST_RESULT_NOT_MATCH.getErrorDesc()); + + BML_CLIENT_FAILED.setErrorDesc("test"); + assertEquals("test", BML_CLIENT_FAILED.getErrorDesc()); + BML_CLIENT_FAILED.setErrorDesc( + "failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)"); + assertEquals( + "failed to copy 
inputStream and outputStream (inputStream和outputStream流copy失败)", + BML_CLIENT_FAILED.getErrorDesc()); + + SERVER_URL_NOT_NULL.setErrorDesc("test"); + assertEquals("test", SERVER_URL_NOT_NULL.getErrorDesc()); + SERVER_URL_NOT_NULL.setErrorDesc("serverUrl cannot be null(服务器URL不能为空)"); + assertEquals("serverUrl cannot be null(服务器URL不能为空)", SERVER_URL_NOT_NULL.getErrorDesc()); + } + + @Test + void testGetComment() { + assertEquals( + "the result returned by the repository client POST request does not match(物料库客户端POST请求返回的result不匹配)", + POST_REQUEST_RESULT_NOT_MATCH.getComment()); + assertEquals( + "failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)", + BML_CLIENT_FAILED.getComment()); + assertEquals("serverUrl cannot be null(服务器URL不能为空)", SERVER_URL_NOT_NULL.getComment()); + } + + @Test + void testSetComment() { + POST_REQUEST_RESULT_NOT_MATCH.setComment("test"); + assertEquals("test", POST_REQUEST_RESULT_NOT_MATCH.getComment()); + POST_REQUEST_RESULT_NOT_MATCH.setComment( + "the result returned by the repository client POST request does not match(物料库客户端POST请求返回的result不匹配)"); + assertEquals( + "the result returned by the repository client POST request does not match(物料库客户端POST请求返回的result不匹配)", + POST_REQUEST_RESULT_NOT_MATCH.getComment()); + + BML_CLIENT_FAILED.setComment("test"); + assertEquals("test", BML_CLIENT_FAILED.getComment()); + BML_CLIENT_FAILED.setComment( + "failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)"); + assertEquals( + "failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)", + BML_CLIENT_FAILED.getComment()); + + SERVER_URL_NOT_NULL.setComment("test"); + assertEquals("test", SERVER_URL_NOT_NULL.getComment()); + SERVER_URL_NOT_NULL.setComment("serverUrl cannot be null(服务器URL不能为空)"); + assertEquals("serverUrl cannot be null(服务器URL不能为空)", SERVER_URL_NOT_NULL.getComment()); + } + + @Test + void testGetModule() { + assertEquals("bmlClient", POST_REQUEST_RESULT_NOT_MATCH.getModule()); + assertEquals("bmlClient", BML_CLIENT_FAILED.getModule()); + assertEquals("bmlClient", SERVER_URL_NOT_NULL.getModule()); + } + + @Test + void testSetModule() { + POST_REQUEST_RESULT_NOT_MATCH.setModule("test"); + assertEquals("test", POST_REQUEST_RESULT_NOT_MATCH.getModule()); + POST_REQUEST_RESULT_NOT_MATCH.setModule("bmlClient"); + assertEquals("bmlClient", POST_REQUEST_RESULT_NOT_MATCH.getModule()); + + BML_CLIENT_FAILED.setModule("test"); + assertEquals("test", BML_CLIENT_FAILED.getModule()); + BML_CLIENT_FAILED.setModule("bmlClient"); + assertEquals("bmlClient", BML_CLIENT_FAILED.getModule()); + + SERVER_URL_NOT_NULL.setModule("test"); + assertEquals("test", SERVER_URL_NOT_NULL.getModule()); + SERVER_URL_NOT_NULL.setModule("bmlClient"); + assertEquals("bmlClient", SERVER_URL_NOT_NULL.getModule()); + } +} diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-common/pom.xml b/linkis-public-enhancements/linkis-bml/linkis-bml-common/pom.xml index 612789821f1..c75061f5365 100644 --- a/linkis-public-enhancements/linkis-bml/linkis-bml-common/pom.xml +++ b/linkis-public-enhancements/linkis-bml/linkis-bml-common/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../../pom.xml linkis-bml-common diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/pom.xml b/linkis-public-enhancements/linkis-bml/linkis-bml-server/pom.xml index 1e87c63008f..cf51081242e 100644 --- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/pom.xml +++ 
b/linkis-public-enhancements/linkis-bml/linkis-bml-server/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../../pom.xml linkis-bmlserver diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/assembly/distribution.xml index 9700c215add..9bf1c874628 100644 --- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/assembly/distribution.xml +++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - + + linkis-bml dir diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/BmlProjectDao.java b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/BmlProjectDao.java index f9714c7f51c..068b5107667 100644 --- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/BmlProjectDao.java +++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/BmlProjectDao.java @@ -27,31 +27,10 @@ @Mapper public interface BmlProjectDao { - @Insert( - "insert ignore into linkis_ps_bml_project(name, `system`, source, description, creator, enabled, create_time) " - + "values(#{bmlProject.name}, #{bmlProject.system}, #{bmlProject.source}, #{bmlProject.description}, " - + "#{bmlProject.creator}, #{bmlProject.enabled}, #{bmlProject.createTime})") - @Options(useGeneratedKeys = true, keyProperty = "bmlProject.id", keyColumn = "id") void createNewProject(@Param("bmlProject") BmlProject bmlProject); - @Select("select * from linkis_ps_bml_project where name = #{projectName}") - @Results( - value = { - @Result(property = "id", column = "id"), - @Result(property = "name", column = "name"), - @Result(property = "createTime", column = "create_time") - }) BmlProject getBmlProject(@Param("projectName") String projectName); - @Insert({ - "" - }) void setProjectPriv( @Param("projectId") Integer projectId, @Param("usernames") List usernames, @@ -59,37 +38,19 @@ void setProjectPriv( @Param("creator") String creator, @Param("createTime") Date createTime); - @Select( - "select a.priv from linkis_ps_bml_project_user a join linkis_ps_bml_project b on " - + "a.project_id = b.id and b.name = #{projectName} and a.username = #{username}") Integer getPrivInProject( @Param("projectName") String projectName, @Param("username") String username); - @Insert( - "insert ignore into linkis_ps_bml_project_resource(project_id, resource_id) " - + "values(#{projectId}, #{resourceId})") - // @Options(useGeneratedKeys = true, keyProperty = "bmlProject.id", keyColumn = "id") void addProjectResource(@Param("projectId") Integer id, @Param("resourceId") String resourceId); - @Select( - "select a.name from linkis_ps_bml_project a join " - + " linkis_ps_bml_project_resource b on b.resource_id = #{resourceId} and a.id = b.project_id") String getProjectNameByResourceId(@Param("resourceId") String resourceId); - @Select("select id from linkis_ps_bml_project where name = #{projectName}") Integer getProjectIdByName(@Param("projectName") String projectName); - @Insert( - "insert ignore into linkis_ps_bml_project_resource(project_id, resource_id) " - + "values(#{projectId}, #{resourceId})") void attachResourceAndProject( @Param("projectId") Integer projectId, @Param("resourceId") String 
resourceId); - @Select( - "select count(*) from linkis_ps_bml_project_resource where project_id = #{resourceId} and resource_id = #{resourceId}") - Integer checkIfExists( - @Param("projectId") Integer projectId, @Param("resourceId") String resourceId); + int checkIfExists(@Param("projectId") Integer projectId, @Param("resourceId") String resourceId); - @Delete("delete from linkis_ps_bml_project_user where project_id = #{projectId}") void deleteAllPriv(@Param("projectId") Integer projectId); } diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/ResourceDao.java b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/ResourceDao.java index 5fb770c6548..c056b2ed4da 100644 --- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/ResourceDao.java +++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/ResourceDao.java @@ -20,8 +20,6 @@ import org.apache.linkis.bml.entity.Resource; import org.apache.ibatis.annotations.Param; -import org.apache.ibatis.annotations.Select; -import org.apache.ibatis.annotations.Update; import java.util.List; import java.util.Map; @@ -36,17 +34,12 @@ public interface ResourceDao { long uploadResource(Resource resource); - @Select( - "select exists(select * from `linkis_ps_bml_resources` where resource_id = #{resourceId} and enable_flag = 1 )") int checkExists(@Param("resourceId") String resourceId); Resource getResource(@Param("resourceId") String resourceId); - @Select("select owner from `linkis_ps_bml_resources` where resource_id = #{resourceId} ") String getUserByResourceId(@Param("resourceId") String resourceId); - @Update( - "update `linkis_ps_bml_resources` set owner = #{newOwner} where resource_id = #{resourceId} and owner=#{oldOwner}") void changeOwner( @Param("resourceId") String resourceId, @Param("oldOwner") String oldOwner, diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/VersionDao.java b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/VersionDao.java index 71c1c6f1048..49cda950694 100644 --- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/VersionDao.java +++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/VersionDao.java @@ -21,8 +21,6 @@ import org.apache.linkis.bml.entity.Version; import org.apache.ibatis.annotations.Param; -import org.apache.ibatis.annotations.Select; -import org.apache.ibatis.annotations.Update; import java.util.List; import java.util.Map; @@ -51,17 +49,11 @@ void bathDeleteVersions( String getNewestVersion(@Param("resourceId") String resourceId); - @Select( - "select start_byte from linkis_ps_bml_resources_version where resource_id = #{resourceId} and version = #{version}") long getStartByteForResource( @Param("resourceId") String resourceId, @Param("version") String version); - @Select( - "select end_byte from linkis_ps_bml_resources_version where resource_id = #{resourceId} and version = #{version}") long getEndByte(@Param("resourceId") String resourceId, @Param("version") String version); - @Select( - "select * from linkis_ps_bml_resources_version where resource_id=#{resourceId} and version = #{version} and enable_flag = 1") ResourceVersion findResourceVersion( @Param("resourceId") String resourceId, @Param("version") String version); @@ 
-71,12 +63,8 @@ List getAllResourcesViaSystem( List selectResourcesViaSystemByPage( @Param("system") String system, @Param("user") String user); - @Select( - "select exists(select * from `linkis_ps_bml_resources_version` where resource_id=#{resourceId} and version=#{version} and enable_flag = 1)") int checkVersion(@Param("resourceId") String resourceId, @Param("version") String version); - @Select( - "select enable_flag from `linkis_ps_bml_resources_version` where resource_id = #{resourceId} and version = #{version}") int selectResourceVersionEnbleFlag( @Param("resourceId") String resourceId, @Param("version") String version); @@ -85,8 +73,6 @@ int selectResourceVersionEnbleFlag( * * @param resourceId resourceId */ - @Update( - "update `linkis_ps_bml_resources_version` set enable_flag = 0 where resource_id = #{resourceId}") void deleteResource(@Param("resourceId") String resourceId); void batchDeleteResources(@Param("resourceIds") List resourceIds); @@ -96,7 +82,5 @@ ResourceVersion getResourceVersion( List selectVersionByPage(@Param("resourceId") String resourceId); - @Select( - "select * from linkis_ps_bml_resources_version where resource_id=#{resourceId} and enable_flag = 1") List getResourceVersionsByResourceId(@Param("resourceId") String resourceId); } diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/impl/BmlProjectMapper.xml b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/impl/BmlProjectMapper.xml new file mode 100644 index 00000000000..83f8472a0dd --- /dev/null +++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/impl/BmlProjectMapper.xml @@ -0,0 +1,62 @@ + + + + + + + + insert ignore into linkis_ps_bml_project(name, `system`, source, description, creator, enabled, create_time) + values(#{bmlProject.name}, #{bmlProject.system}, #{bmlProject.source}, #{bmlProject.description}, + #{bmlProject.creator}, #{bmlProject.enabled}, #{bmlProject.createTime}) + + + + insert ignore into linkis_ps_bml_project_user(project_id, username, priv, creator, create_time) values + + #{projectId}, #{username}, #{priv}, #{creator}, #{createTime} + + + + + insert ignore into linkis_ps_bml_project_resource(project_id, resource_id) + values(#{projectId}, #{resourceId}) + + + + + insert ignore into linkis_ps_bml_project_resource(project_id, resource_id) + values(#{projectId}, #{resourceId}) + + + + delete from linkis_ps_bml_project_user where project_id = #{projectId} + + + diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/impl/ResourceMapper.xml b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/impl/ResourceMapper.xml index df11ce22e38..0234e49b8f7 100644 --- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/impl/ResourceMapper.xml +++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/impl/ResourceMapper.xml @@ -92,6 +92,16 @@ #{updator},#{enableFlag}) + + + + + update `linkis_ps_bml_resources` set owner = #{newOwner} where resource_id = #{resourceId} and owner=#{oldOwner} + diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/impl/VersionMapper.xml b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/impl/VersionMapper.xml index 320c88d4586..0600bd4331b 
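
The DAO diffs above (BmlProjectDao, ResourceDao, VersionDao) strip the inline @Select/@Insert/@Update annotations and move the SQL into mapper XML files whose element ids match the interface method names. A schematic sketch of the two styles on the Java side; DemoDao and its table are invented, not the real BML mappers:

```java
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;

// Invented mapper used only to contrast the two styles.
@Mapper
public interface DemoDao {

  // Annotation style (the "before"): SQL inlined on the interface method.
  @Select("select count(*) from demo_resource where resource_id = #{resourceId}")
  int countAnnotated(@Param("resourceId") String resourceId);

  // XML style (the "after"): no annotation here; MyBatis binds this method to
  // a <select id="count"> element in a mapper XML whose namespace is this
  // interface's fully qualified name.
  int count(@Param("resourceId") String resourceId);
}
```

Moving the SQL to XML keeps multi-line statements and dynamic blocks (like setProjectPriv's batch insert) out of Java string literals; the trade-off is that the element id and the method name must stay in sync by hand.
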
100644 --- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/impl/VersionMapper.xml +++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/main/java/org/apache/linkis/bml/dao/impl/VersionMapper.xml @@ -163,5 +163,32 @@ select * from linkis_ps_bml_resources_version where resource_id = #{resourceId} and version = #{version} and enable_flag = 1 + + + + + + + + + + + + update `linkis_ps_bml_resources_version` set enable_flag = 0 where resource_id = #{resourceId} + + + diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BmlProjectDaoTest.java b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BmlProjectDaoTest.java index a1302394cb9..ff8e5279364 100644 --- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BmlProjectDaoTest.java +++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BmlProjectDaoTest.java @@ -26,13 +26,31 @@ import java.util.List; import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.junit.jupiter.api.Assertions.assertTrue; class BmlProjectDaoTest extends BaseDaoTest { + private static final Logger logger = LoggerFactory.getLogger(BmlProjectDaoTest.class); + @Autowired BmlProjectDao bmlProjectDao; + void insertNewProject() { + BmlProject bmlProject = new BmlProject(); + bmlProject.setName("testName"); + bmlProject.setSystem("testSy"); + bmlProject.setSource("test"); + bmlProject.setDescription("descTest"); + bmlProject.setCreator("creCreatorUser"); + bmlProject.setEnabled(1); + bmlProject.setCreateTime(new Date()); + bmlProjectDao.createNewProject(bmlProject); + } + @Test - void createNewProject() { + void testCreateNewProject() { BmlProject bmlProject = new BmlProject(); bmlProject.setName("testName"); bmlProject.setSystem("testSy"); @@ -41,18 +59,19 @@ void createNewProject() { bmlProject.setCreator("creCreatorUser"); bmlProject.setEnabled(1); bmlProject.setCreateTime(new Date()); - bmlProject.setId(1); bmlProjectDao.createNewProject(bmlProject); + BmlProject bmlProjects = bmlProjectDao.getBmlProject("testName"); + assertTrue(bmlProjects != null); } @Test - void getBmlProject() { - createNewProject(); + void testGetBmlProject() { + insertNewProject(); bmlProjectDao.getBmlProject("testName"); } @Test - void setProjectPriv() { + void testSetProjectPriv() { List usernamesList = new ArrayList<>(); usernamesList.add("creCreatorUser"); usernamesList.add("creCreatorUser1"); @@ -61,44 +80,48 @@ void setProjectPriv() { } @Test - void getPrivInProject() { - setProjectPriv(); - createNewProject(); - bmlProjectDao.getPrivInProject("testName", "creCreatorUser"); + void testGetPrivInProject() { + Integer privInt = bmlProjectDao.getPrivInProject("testName", "creCreatorUser"); + logger.info("privInt:" + privInt); + assertTrue(privInt == 2); } @Test - void addProjectResource() { + void testAddProjectResource() { bmlProjectDao.addProjectResource(1, "123"); } @Test - void getProjectNameByResourceId() { - setProjectPriv(); - bmlProjectDao.getProjectNameByResourceId("123"); + void testGetProjectNameByResourceId() { + String projectName = bmlProjectDao.getProjectNameByResourceId("123"); + logger.info("projectName:" + projectName); + assertTrue(projectName.equals("testName")); } @Test - void getProjectIdByName() { - createNewProject(); + void testGetProjectIdByName() { + 
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BmlProjectDaoTest.java b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BmlProjectDaoTest.java
index a1302394cb9..ff8e5279364 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BmlProjectDaoTest.java
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BmlProjectDaoTest.java
@@ -26,13 +26,31 @@
 import java.util.List;

 import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;

 class BmlProjectDaoTest extends BaseDaoTest {

+  private static final Logger logger = LoggerFactory.getLogger(BmlProjectDaoTest.class);
+
   @Autowired BmlProjectDao bmlProjectDao;

+  void insertNewProject() {
+    BmlProject bmlProject = new BmlProject();
+    bmlProject.setName("testName");
+    bmlProject.setSystem("testSy");
+    bmlProject.setSource("test");
+    bmlProject.setDescription("descTest");
+    bmlProject.setCreator("creCreatorUser");
+    bmlProject.setEnabled(1);
+    bmlProject.setCreateTime(new Date());
+    bmlProjectDao.createNewProject(bmlProject);
+  }
+
   @Test
-  void createNewProject() {
+  void testCreateNewProject() {
     BmlProject bmlProject = new BmlProject();
     bmlProject.setName("testName");
     bmlProject.setSystem("testSy");
@@ -41,18 +59,19 @@ void createNewProject() {
     bmlProject.setCreator("creCreatorUser");
     bmlProject.setEnabled(1);
     bmlProject.setCreateTime(new Date());
-    bmlProject.setId(1);
     bmlProjectDao.createNewProject(bmlProject);
+    BmlProject bmlProjects = bmlProjectDao.getBmlProject("testName");
+    assertTrue(bmlProjects != null);
   }

   @Test
-  void getBmlProject() {
-    createNewProject();
+  void testGetBmlProject() {
+    insertNewProject();
     bmlProjectDao.getBmlProject("testName");
   }

   @Test
-  void setProjectPriv() {
+  void testSetProjectPriv() {
     List usernamesList = new ArrayList<>();
     usernamesList.add("creCreatorUser");
     usernamesList.add("creCreatorUser1");
@@ -61,44 +80,48 @@ void setProjectPriv() {
   }

   @Test
-  void getPrivInProject() {
-    setProjectPriv();
-    createNewProject();
-    bmlProjectDao.getPrivInProject("testName", "creCreatorUser");
+  void testGetPrivInProject() {
+    Integer privInt = bmlProjectDao.getPrivInProject("testName", "creCreatorUser");
+    logger.info("privInt:" + privInt);
+    assertTrue(privInt == 2);
   }

   @Test
-  void addProjectResource() {
+  void testAddProjectResource() {
     bmlProjectDao.addProjectResource(1, "123");
   }

   @Test
-  void getProjectNameByResourceId() {
-    setProjectPriv();
-    bmlProjectDao.getProjectNameByResourceId("123");
+  void testGetProjectNameByResourceId() {
+    String projectName = bmlProjectDao.getProjectNameByResourceId("123");
+    logger.info("projectName:" + projectName);
+    assertTrue(projectName.equals("testName"));
   }

   @Test
-  void getProjectIdByName() {
-    createNewProject();
+  void testGetProjectIdByName() {
+    insertNewProject();
     bmlProjectDao.getProjectIdByName("testName");
+    Integer i = bmlProjectDao.getProjectIdByName("testName");
+    assertTrue(i != null);
   }

   @Test
-  void attachResourceAndProject() {
-    createNewProject();
+  void testAttachResourceAndProject() {
+    insertNewProject();
     bmlProjectDao.attachResourceAndProject(1, "123");
   }

   @Test
-  void checkIfExists() {
-    setProjectPriv();
-    bmlProjectDao.checkIfExists(1, "123");
+  void testCheckIfExists() {
+    insertNewProject();
+    Integer i = bmlProjectDao.checkIfExists(1, "123");
+    assertTrue(i != null);
   }

   @Test
-  void deleteAllPriv() {
-    setProjectPriv();
+  void testDeleteAllPriv() {
+    insertNewProject();
     bmlProjectDao.deleteAllPriv(1);
   }
 }
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/DownloadDaoTest.java b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/DownloadDaoTest.java
index 6ae0966c994..78de5d0b09c 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/DownloadDaoTest.java
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/DownloadDaoTest.java
@@ -32,7 +32,7 @@ class DownloadDaoTest extends BaseDaoTest {
   @Autowired DownloadDao downloadDao;

   @Test
-  void insertDownloadModel() {
+  void testInsertDownloadModel() {
     DownloadModel downloadModel = new DownloadModel();
     downloadModel.setDownloader("test");
     downloadModel.setClientIp("192.143.253");
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/ResourceDaoTest.java b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/ResourceDaoTest.java
index fbba71b6ccd..99657dd41c0 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/ResourceDaoTest.java
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/ResourceDaoTest.java
@@ -31,9 +31,25 @@ class ResourceDaoTest extends BaseDaoTest {

   @Autowired ResourceDao resourceDao;

+  void insertResource() {
+    Resource resource = new Resource();
+    resource.setResourceId("123");
+    resource.setResourceHeader("2");
+    resource.setDownloadedFileName("testFileName");
+    resource.setSystem("testSystem");
+    resource.setCreateTime(new Date());
+    resource.setUser("testUser");
+    resource.setExpireTime("2012.12.02");
+    resource.setMaxVersion(3);
+    resource.setUpdateTime(new Date());
+    resource.setUpdator("testUpdator");
+    resource.setEnableFlag(false);
+    resourceDao.uploadResource(resource);
+  }
+
   @Test
-  void getResources() {
-    uploadResource();
+  void testGetResources() {
+    insertResource();
     Map map = new HashMap<>();
     map.put("owner", "testowner");
     map.put("resource_id", "123");
@@ -42,14 +58,14 @@ void getResources() {
   }

   @Test
-  void deleteResource() {
-    uploadResource();
+  void testDeleteResource() {
+    insertResource();
     resourceDao.deleteResource("123");
   }

   @Test
-  void batchDeleteResources() {
-    uploadResource();
+  void testBatchDeleteResources() {
+    insertResource();
     List list = new ArrayList<>();
     list.add("123");
     list.add("2");
@@ -58,7 +74,7 @@ void batchDeleteResources() {
   }

   @Test
-  void uploadResource() {
+  void testUploadResource() {
     Resource resource = new Resource();
     resource.setResourceId("123");
     resource.setResourceHeader("2");
@@ -75,25 +91,25 @@ void uploadResource() {
   }

   @Test
-  void checkExists() {
-    uploadResource();
+  void testCheckExists() {
+    insertResource();
     resourceDao.checkExists("123");
   }

   @Test
-  void getResource() {
-    uploadResource();
+  void testGetResource() {
+    insertResource();
     resourceDao.getResource("123");
   }

   @Test
-  void getUserByResourceId() {
-    uploadResource();
+  void testGetUserByResourceId() {
+    insertResource();
     resourceDao.getUserByResourceId("123");
   }

   @Test
-  void changeOwner() {
+  void testChangeOwner() {
     String oldOwner = "oldtest";
     String newOwner = "newtest";
     resourceDao.changeOwner("123", oldOwner, newOwner);
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/TaskDaoTest.java b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/TaskDaoTest.java
index 97e30dbe2cf..68b42fb1bb3 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/TaskDaoTest.java
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/TaskDaoTest.java
@@ -30,8 +30,27 @@ class TaskDaoTest extends BaseDaoTest {

   @Autowired TaskDao taskDao;

+  void insertResourceTask() {
+    ResourceTask resourceTask = new ResourceTask();
+    resourceTask.setResourceId("123");
+    resourceTask.setClientIp("192.168.142");
+    resourceTask.setEndTime(new Date());
+    resourceTask.setId(32);
+    resourceTask.setStartTime(new Date());
+    resourceTask.setErrMsg("testErr");
+    resourceTask.setExtraParams("testpar");
+    resourceTask.setInstance("testInst");
+    resourceTask.setLastUpdateTime(new Date());
+    resourceTask.setOperation("testOPer");
+    resourceTask.setState("1");
+    resourceTask.setSubmitUser("testSumUser");
+    resourceTask.setSystem("testSym");
+    resourceTask.setVersion("1.2");
+    taskDao.insert(resourceTask);
+  }
+
   @Test
-  void insert() {
+  void testInsert() {
     ResourceTask resourceTask = new ResourceTask();
     resourceTask.setResourceId("123");
     resourceTask.setClientIp("192.168.142");
@@ -51,20 +70,20 @@ void insert() {
   }

   @Test
-  void updateState() {
-    insert();
+  void testUpdateState() {
+    insertResourceTask();
     taskDao.updateState(32, "1", new Date());
   }

   @Test
-  void updateState2Failed() {
-    insert();
+  void testUpdateState2Failed() {
+    insertResourceTask();
     taskDao.updateState2Failed(32, "1", new Date(), "errMsg");
   }

   @Test
-  void getNewestVersion() {
-    insert();
+  void testGetNewestVersion() {
+    insertResourceTask();
     taskDao.getNewestVersion("123");
   }
 }
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/VersionDaoTest.java b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/VersionDaoTest.java
index 9f9ab924122..b363c7e3a75 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/VersionDaoTest.java
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/VersionDaoTest.java
@@ -18,6 +18,7 @@
 package org.apache.linkis.bml.dao;

 import org.apache.linkis.bml.entity.ResourceVersion;
+import org.apache.linkis.bml.entity.Version;

 import org.springframework.beans.factory.annotation.Autowired;

@@ -34,21 +35,41 @@ class VersionDaoTest extends BaseDaoTest {
   private final String resourceId = "123";
   private final String version = "1.2";

+  void insertVersion() {
+    ResourceVersion resourceVersion = new ResourceVersion();
+    resourceVersion.setResourceId(resourceId);
+    resourceVersion.setUser("binbin");
+    resourceVersion.setSystem("testSys");
+    resourceVersion.setFileMd5("binbinmd5");
+    resourceVersion.setVersion(version);
+    resourceVersion.setSize(25);
+    resourceVersion.setStartByte(35);
+    resourceVersion.setEndByte(36);
+    resourceVersion.setResource("testreso");
+    resourceVersion.setDescription("testDesc");
+    resourceVersion.setStartTime(new Date());
+    resourceVersion.setEndTime(new Date());
+    resourceVersion.setClientIp("132.145.36");
+    resourceVersion.setUpdator("testUp");
+    resourceVersion.setEnableFlag(true);
+    versionDao.insertNewVersion(resourceVersion);
+  }
+
   @Test
-  void getVersion() {
-    insertNewVersion();
+  void testGetVersion() {
+    insertVersion();
     versionDao.getVersion(resourceId, version);
   }

   @Test
-  void getVersions() {
-    insertNewVersion();
+  void testGetVersions() {
+    insertVersion();
     versionDao.getVersions(resourceId);
   }

   @Test
-  void getResourcesVersions() {
-    insertNewVersion();
+  void testGetResourcesVersions() {
+    insertVersion();
     Map map = new HashMap<>();
     map.put("system", "testSys");
     map.put("user", "binbin");
@@ -60,20 +81,20 @@ void getResourcesVersions() {
   }

   @Test
-  void deleteVersion() {
-    insertNewVersion();
+  void testDeleteVersion() {
+    insertVersion();
     versionDao.deleteVersion(resourceId, version);
   }

   @Test
-  void deleteVersions() {
-    insertNewVersion();
+  void testDeleteVersions() {
+    insertVersion();
     versionDao.deleteVersions(resourceId);
   }

   @Test
-  void bathDeleteVersions() {
-    insertNewVersion();
+  void testBathDeleteVersions() {
+    insertVersion();
     List resourceIdlist = new ArrayList<>();
     resourceIdlist.add(resourceId);
     resourceIdlist.add("21");
@@ -84,7 +105,7 @@ void bathDeleteVersions() {
   }

   @Test
-  void insertNewVersion() {
+  void testInsertNewVersion() {
     ResourceVersion resourceVersion = new ResourceVersion();
     resourceVersion.setResourceId(resourceId);
     resourceVersion.setUser("binbin");
@@ -100,73 +121,73 @@ void insertNewVersion() {
     resourceVersion.setEndTime(new Date());
     resourceVersion.setClientIp("132.145.36");
     resourceVersion.setUpdator("testUp");
-    resourceVersion.setEnableFlag(false);
+    resourceVersion.setEnableFlag(true);
     versionDao.insertNewVersion(resourceVersion);
   }

   @Test
-  void getResourcePath() {
-    insertNewVersion();
+  void testGetResourcePath() {
+    insertVersion();
     versionDao.getResourcePath(resourceId);
   }

   @Test
-  void getNewestVersion() {
-    insertNewVersion();
+  void testGetNewestVersion() {
+    insertVersion();
     versionDao.getNewestVersion(resourceId);
   }

   @Test
-  void getStartByteForResource() {
-    insertNewVersion();
+  void testGetStartByteForResource() {
+    insertVersion();
     versionDao.getStartByteForResource(resourceId, version);
   }

   @Test
-  void getEndByte() {
-    insertNewVersion();
+  void testGetEndByte() {
+    insertVersion();
     versionDao.getEndByte(resourceId, version);
   }

   @Test
-  void findResourceVersion() {
-    insertNewVersion();
+  void testFindResourceVersion() {
+    insertVersion();
     versionDao.findResourceVersion(resourceId, version);
   }

   @Test
-  void getAllResourcesViaSystem() {
-    insertNewVersion();
+  void testGetAllResourcesViaSystem() {
+    insertVersion();
     versionDao.getAllResourcesViaSystem(resourceId, version);
   }

   @Test
-  void selectResourcesViaSystemByPage() {
-    insertNewVersion();
+  void testSelectResourcesViaSystemByPage() {
+    insertVersion();
     versionDao.selectResourcesViaSystemByPage(resourceId, version);
   }

   @Test
-  void checkVersion() {
-    insertNewVersion();
+  void testCheckVersion() {
+    insertVersion();
     versionDao.checkVersion(resourceId, version);
   }

   @Test
-  void selectResourceVersionEnbleFlag() {
-    insertNewVersion();
+  void testSelectResourceVersionEnbleFlag() {
+    insertVersion();
     versionDao.selectResourceVersionEnbleFlag(resourceId, version);
   }

   @Test
-  void deleteResource() {
-    insertNewVersion();
+  void testDeleteResource() {
+    insertVersion();
     versionDao.deleteResource(resourceId);
   }

   @Test
-  void batchDeleteResources() {
-    insertNewVersion();
+  void testBatchDeleteResources() {
+    insertVersion();
     List resourceIdlist = new ArrayList<>();
     resourceIdlist.add(resourceId);
     resourceIdlist.add("21");
@@ -177,19 +198,21 @@ void batchDeleteResources() {
   }

   @Test
-  void getResourceVersion() {
+  void testGetResourceVersion() {
     versionDao.getResourceVersion(resourceId, version);
   }

   @Test
-  void selectVersionByPage() {
-    insertNewVersion();
-    versionDao.selectVersionByPage(resourceId);
+  void testSelectVersionByPage() {
+    insertVersion();
+    List list = versionDao.selectVersionByPage(resourceId);
+    assertTrue(list.size() >= 1);
   }

   @Test
-  void getResourceVersionsByResourceId() {
-    insertNewVersion();
-    versionDao.getResourceVersionsByResourceId(resourceId);
+  void testGetResourceVersionsByResourceId() {
+    insertVersion();
+    List list = versionDao.getResourceVersionsByResourceId(resourceId);
+    assertTrue(list.size() >= 1);
   }
 }
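Note: the test changes above all follow one pattern: the row-seeding logic moves out of a `@Test` method into a plain helper, and tests assert on what they read back instead of only invoking the DAO. A self-contained stand-in (the in-memory DAO below is hypothetical; the real tests hit H2 via the Linkis mappers):

```java
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.util.ArrayList;
import java.util.List;
import org.junit.jupiter.api.Test;

// Stand-alone illustration of the seed-helper pattern used in the DAO tests.
class SeedHelperPatternTest {

  // Minimal in-memory stand-in for a mapper such as VersionDao (hypothetical).
  static class InMemoryVersionDao {
    private final List<String> versions = new ArrayList<>();

    void insertNewVersion(String v) {
      versions.add(v);
    }

    List<String> selectVersionByPage(String resourceId) {
      return versions;
    }
  }

  private final InMemoryVersionDao versionDao = new InMemoryVersionDao();

  // Not annotated with @Test: renaming a test (getVersion -> testGetVersion)
  // can no longer silently break other tests that used to call it directly.
  void insertVersion() {
    versionDao.insertNewVersion("1.2");
  }

  @Test
  void testSelectVersionByPage() {
    insertVersion();
    List<String> list = versionDao.selectVersionByPage("123");
    // Assert on the read-back value, not merely that no exception was thrown.
    assertTrue(list.size() >= 1);
  }
}
```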
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/resources/create.sql b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/resources/create.sql
index 12f4b1855f9..4ad89a52ef9 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/resources/create.sql
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/src/test/resources/create.sql
@@ -26,6 +26,7 @@ CREATE TABLE linkis_ps_bml_project_resource (
   PRIMARY KEY ( id )
 );

+DROP TABLE IF EXISTS linkis_ps_bml_project;
 CREATE TABLE linkis_ps_bml_project (
   id int(10) NOT NULL AUTO_INCREMENT,
@@ -40,6 +41,7 @@ CREATE TABLE linkis_ps_bml_project (
   UNIQUE KEY name ( name )
 );

+DROP TABLE IF EXISTS linkis_ps_bml_project_user;
 CREATE TABLE linkis_ps_bml_project_user (
   id int(10) NOT NULL AUTO_INCREMENT,
@@ -53,6 +55,8 @@ CREATE TABLE linkis_ps_bml_project_user (
   UNIQUE KEY user_project ( username , project_id )
 );

+
+DROP TABLE IF EXISTS linkis_ps_bml_resources_version;
 CREATE TABLE linkis_ps_bml_resources_version (
   id bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary key',
@@ -124,3 +128,7 @@ CREATE TABLE linkis_ps_resources_download_history (
   downloader varchar(50) NOT NULL COMMENT 'Downloader',
   PRIMARY KEY ( id )
 );
+
+insert ignore into linkis_ps_bml_project_user(project_id, username, priv, creator, create_time) values ( 1, 'creCreatorUser', 2, 'creatorTest', now());
+insert ignore into linkis_ps_bml_project(name, `system`, source, description, creator, enabled, create_time)values('testName', 'testSy','test', 'descTest','creCreatorUser', 1, now());
+insert ignore into linkis_ps_bml_project_resource(project_id, resource_id) values(1, '123');
\ No newline at end of file
diff --git a/linkis-public-enhancements/linkis-configuration/pom.xml b/linkis-public-enhancements/linkis-configuration/pom.xml
index 3b84c05a678..ff1fd02f160 100644
--- a/linkis-public-enhancements/linkis-configuration/pom.xml
+++ b/linkis-public-enhancements/linkis-configuration/pom.xml
@@ -22,6 +22,7 @@
     org.apache.linkis
     linkis
     1.3.0
+    ../../pom.xml

   linkis-configuration
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java
index 55294398b6a..cd6d8ce9d09 100644
--- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java
+++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java
@@ -55,7 +55,7 @@ List getConfigByEngineUserCreator(

   ConfigKey selectKeyByKeyID(@Param("id") Long keyID);

-  List seleteKeyByKeyName(@Param("keyName") String keyName);
+  List selectKeyByKeyName(@Param("keyName") String keyName);

   List listKeyByStringValue(@Param("stringValue") String stringValue);
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/impl/ConfigMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/impl/ConfigMapper.xml
index b22b135b25b..7dafb0a39de 100644
--- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/impl/ConfigMapper.xml
+++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/impl/ConfigMapper.xml
@@ -136,7 +136,7 @@
         WHERE id = #{id}

-
+
         SELECT FROM linkis_ps_configuration_config_key WHERE `key` = #{keyName}
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java
new file mode 100644
index 00000000000..e402a43dd39
--- /dev/null
+++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.configuration.errorcode;
+
+import org.apache.linkis.common.errorcode.ErrorCodeUtils;
+
+public enum LinkisConfigurationErrorCodeSummary {
+  BUILD_LABEL_ID(14100, "", ""),
+  FAILED_TO_BUILD_LABEL(14100, "Failed to build label(建立标签失败)", "Failed to build label(建立标签失败)"),
+  BUILD_LABEL_IS_NULL(
+      14100,
+      "Failed to build label ,label is null(建立标签失败,标签为空)",
+      "Failed to build label ,label is null(建立标签失败,标签为空)"),
+  CONFIGKEY_CANNOT_BE_NULL(
+      14100,
+      "configKey cannot be null(configKey 不能为空)",
+      "configKey cannot be null(configKey 不能为空)"),
+  CONFIG_KEY_NOT_EXISTS(
+      14100, "config key not exists:(配置键不存在:)", "config key not exists:(配置键不存在:)"),
+  LABEL_NOT_EXISTS(14100, "label not exists:(标签不存在:)", "label not exists:(标签不存在:)"),
+  KEY_OR_VALUE_CANNOT(
+      14100, "key or value cannot be null(键或值不能为空)", " key or value cannot be null(键或值不能为空)"),
+  PARAMS_CANNOT_BE_EMPTY(
+      14100, "params cannot be empty!(参数不能为空!)", "params cannot be empty!(参数不能为空!)"),
+  TOKEN_IS_ERROR(14100, "token is error(令牌是错误的)", "token is error(令牌是错误的)"),
+  IS_NULL_CANNOT_BE_ADDED(
+      14100,
+      "categoryName is null, cannot be added(categoryName 为空,无法添加)",
+      "categoryName is null, cannot be added(categoryName 为空,无法添加)"),
+  CANNOT_BE_INCLUDED(
+      14100,
+      "categoryName cannot be included '-'(类别名称不能包含 '-')",
+      "categoryName cannot be included '-'(类别名称不能包含 '-')"),
+  CREATOR_IS_NULL_CANNOT_BE_ADDED(
+      14100,
+      "creator is null, cannot be added(创建者为空,无法添加)",
+      "creator is null, cannot be added(创建者为空,无法添加)"),
+  ENGINE_TYPE_IS_NULL(
+      14100,
+      "engine type is null, cannot be added(引擎类型为空,无法添加)",
+      "engine type is null, cannot be added(引擎类型为空,无法添加)"),
+  INCORRECT_FIXED_SUCH(
+      14100,
+      "The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-2.4.3(保存的引擎类型参数有误,请按照固定格式传送,例如spark-2.4.3)",
+      "The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-2.4.3(保存的引擎类型参数有误,请按照固定格式传送,例如spark-2.4.3)"),
+  INCOMPLETE_RECONFIRM(
+      14100,
+      "Incomplete request parameters, please reconfirm(请求参数不完整,请重新确认)",
+      "Incomplete request parameters, please reconfirm(请求参数不完整,请重新确认)"),
+  ONLY_ADMIN_CAN_MODIFY(
+      14100,
+      "only admin can modify category(只有管理员才能修改目录)",
+      "only admin can modify category(只有管理员才能修改目录)"),
+  THE_LABEL_PARAMETER_IS_EMPTY(
+      14100, " The label parameter is empty(标签参数为空)", " The label parameter is empty(标签参数为空)"),
+  ERROR_VALIDATOR_RANGE(
+      14100, "error validator range!(错误验证器范围!)", "error validator range!(错误验证器范围!)"),
+  TYPE_OF_LABEL_NOT_SUPPORTED(
+      14100,
+      "this type of label is not supported:{}(不支持这种类型的标签:{})",
+      "this type of label is not supported:{}(不支持这种类型的标签:{})");
+
+  /** 错误码 */
+  private int errorCode;
+  /** 错误描述 */
+  private String errorDesc;
+  /** 错误可能出现的原因 */
+  private String comment;
+
+  LinkisConfigurationErrorCodeSummary(int errorCode, String errorDesc, String comment) {
+    ErrorCodeUtils.validateErrorCode(errorCode, 10000, 24999);
+    this.errorCode = errorCode;
+    this.errorDesc = errorDesc;
+    this.comment = comment;
+  }
+
+  public int getErrorCode() {
+    return errorCode;
+  }
+
+  public void setErrorCode(int errorCode) {
+    this.errorCode = errorCode;
+  }
+
+  public String getErrorDesc() {
+    return errorDesc;
+  }
+
+  public void setErrorDesc(String errorDesc) {
+    this.errorDesc = errorDesc;
+  }
+
+  public String getComment() {
+    return comment;
+  }
+
+  public void setComment(String comment) {
+    this.comment = comment;
+  }
+
+  @Override
+  public String toString() {
+    return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc;
+  }
+}
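Note: the new enum centralizes all of the module's 14100-series messages, and callers now raise exceptions from enum constants instead of scattered string literals. A minimal self-contained sketch of the same pattern (names simplified; `DemoConfigurationException` stands in for the real `ConfigurationException`):

```java
// Self-contained sketch of the error-code-enum pattern introduced above.
public class ErrorCodeEnumDemo {

  enum ErrorCodeSummary {
    PARAMS_CANNOT_BE_EMPTY(14100, "params cannot be empty!(参数不能为空!)");

    private final int errorCode;
    private final String errorDesc;

    ErrorCodeSummary(int errorCode, String errorDesc) {
      this.errorCode = errorCode;
      this.errorDesc = errorDesc;
    }

    int getErrorCode() { return errorCode; }

    String getErrorDesc() { return errorDesc; }
  }

  // Stand-in for ConfigurationException, which carries the module error code.
  static class DemoConfigurationException extends Exception {
    DemoConfigurationException(String message) { super(message); }
  }

  static void requireNonBlank(String value) throws DemoConfigurationException {
    if (value == null || value.trim().isEmpty()) {
      // Before: throw new ConfigurationException("params cannot be empty!");
      // After: the message is looked up from the enum, so the code and the
      // wording stay consistent across every call site.
      throw new DemoConfigurationException(
          ErrorCodeSummary.PARAMS_CANNOT_BE_EMPTY.getErrorDesc());
    }
  }

  public static void main(String[] args) {
    try {
      requireNonBlank(" ");
    } catch (DemoConfigurationException e) {
      System.out.println(e.getMessage());
    }
  }
}
```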
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/exception/ConfigurationException.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/exception/ConfigurationException.java
index f91165c526b..02add432947 100644
--- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/exception/ConfigurationException.java
+++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/exception/ConfigurationException.java
@@ -19,16 +19,16 @@

 import org.apache.linkis.common.exception.ErrorException;

-public class ConfigurationException extends ErrorException {
+import static org.apache.linkis.configuration.errorcode.LinkisConfigurationErrorCodeSummary.BUILD_LABEL_ID;

-  public static final int CONFIGURATION_ERROR_CODE = 14100;
+public class ConfigurationException extends ErrorException {

   public ConfigurationException(String message) {
-    super(14100, message);
+    super(BUILD_LABEL_ID.getErrorCode(), message);
   }

   public ConfigurationException(String message, Throwable throwable) {
-    super(14100, message);
+    super(BUILD_LABEL_ID.getErrorCode(), message);
     initCause(throwable);
   }
 }
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java
index d56a159c396..c2106a8acb5 100644
--- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java
+++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java
@@ -55,6 +55,8 @@
 import java.util.List;
 import java.util.Map;

+import static org.apache.linkis.configuration.errorcode.LinkisConfigurationErrorCodeSummary.*;
+
 @Api(tags = "parameter configuration")
 @RestController
 @RequestMapping(path = "/configuration")
@@ -90,11 +92,11 @@ public Message addKeyForEngine(
     if (StringUtils.isBlank(engineType)
         || StringUtils.isBlank(version)
         || StringUtils.isBlank(token)) {
-      throw new ConfigurationException("params cannot be empty!");
+      throw new ConfigurationException(PARAMS_CANNOT_BE_EMPTY.getErrorDesc());
     }
     // todo 检验token
     if (!token.equals(ConfigurationConfiguration.COPYKEYTOKEN)) {
-      throw new ConfigurationException("token is error");
+      throw new ConfigurationException(TOKEN_IS_ERROR.getErrorDesc());
     }
     ConfigKey configKey = BDPJettyServerHelper.gson().fromJson(keyJson, ConfigKey.class);
     configurationService.addKeyForEngine(engineType, version, configKey);
@@ -150,10 +152,10 @@ public Message createFirstCategory(HttpServletRequest request, @RequestBody Json
     String categoryName = jsonNode.get("categoryName").asText();
     String description = jsonNode.get("description").asText();
     if (StringUtils.isEmpty(categoryName) || categoryName.equals(NULL)) {
-      throw new ConfigurationException("categoryName is null, cannot be added");
+      throw new ConfigurationException(IS_NULL_CANNOT_BE_ADDED.getErrorDesc());
     }
     if (StringUtils.isEmpty(categoryName) || categoryName.contains("-")) {
-      throw new ConfigurationException("categoryName cannot be included '-'");
+      throw new ConfigurationException(CANNOT_BE_INCLUDED.getErrorDesc());
     }
     categoryService.createFirstCategory(categoryName, description);
     return Message.ok();
@@ -192,10 +194,10 @@ public Message createSecondCategory(HttpServletRequest request, @RequestBody Jso
     String version = jsonNode.get("version").asText();
     String description = jsonNode.get("description").asText();
     if (categoryId <= 0) {
-      throw new ConfigurationException("creator is null, cannot be added");
+      throw new ConfigurationException(CREATOR_IS_NULL_CANNOT_BE_ADDED.getErrorDesc());
     }
     if (StringUtils.isEmpty(engineType) || engineType.toLowerCase().equals(NULL)) {
-      throw new ConfigurationException("engine type is null, cannot be added");
+      throw new ConfigurationException(ENGINE_TYPE_IS_NULL.getErrorDesc());
     }
     if (StringUtils.isEmpty(version) || version.toLowerCase().equals(NULL)) {
       version = LabelUtils.COMMON_VALUE;
@@ -242,7 +244,7 @@ public Message saveFullTree(HttpServletRequest req, @RequestBody JsonNode json)
       String[] tmpString = engineType.split("-");
       if (tmpString.length != 2) {
         throw new ConfigurationException(
-            "The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-2.4.3(保存的引擎类型参数有误,请按照固定格式传送,例如spark-2.4.3)");
+            INCORRECT_FIXED_SUCH.getErrorDesc());
       }
       engine = tmpString[0];
       version = tmpString[1];
@@ -277,7 +279,7 @@ public Message updateCategoryInfo(HttpServletRequest request, @RequestBody JsonN
       description = jsonNode.get("description").asText();
       categoryId = jsonNode.get("categoryId").asInt();
     } catch (Exception e) {
-      throw new ConfigurationException("请求参数不完整,请重新确认");
+      throw new ConfigurationException(INCOMPLETE_RECONFIRM.getErrorDesc());
     }
     if (description != null) {
       categoryService.updateCategory(categoryId, description);
@@ -313,7 +315,7 @@ public Message rpcTest(

   private void checkAdmin(String userName) throws ConfigurationException {
     if (!Configuration.isAdmin(userName)) {
-      throw new ConfigurationException("only admin can modify category(只有管理员才能修改目录)");
+      throw new ConfigurationException(ONLY_ADMIN_CAN_MODIFY.getErrorDesc());
     }
   }
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java
index bd59f4818f5..dd768ac1723 100644
--- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java
+++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java
@@ -45,6 +45,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import static org.apache.linkis.configuration.errorcode.LinkisConfigurationErrorCodeSummary.*;
+
 @Service
 public class ConfigKeyServiceImpl implements ConfigKeyService {

@@ -62,13 +64,14 @@ public ConfigValue saveConfigValue(ConfigKeyValue configKeyValue, List>

     if (StringUtils.isBlank(configKeyValue.getConfigValue())
         || StringUtils.isBlank(configKeyValue.getKey())) {
-      throw new ConfigurationException("key or value cannot be null");
+      throw new ConfigurationException(KEY_OR_VALUE_CANNOT.getErrorDesc());
     }
     LabelParameterParser.labelCheck(labelList);
-    List configKeys = configMapper.seleteKeyByKeyName(configKeyValue.getKey());
+    List configKeys = configMapper.selectKeyByKeyName(configKeyValue.getKey());
     if (null == configKeys || configKeys.isEmpty()) {
-      throw new ConfigurationException("config key not exists: " + configKeyValue.getKey());
+      throw new ConfigurationException(
+          CONFIG_KEY_NOT_EXISTS.getErrorDesc() + configKeyValue.getKey());
     }
     ConfigKey configKey = configKeys.get(0);
     EngineTypeLabel engineTypeLabel = LabelUtil.getEngineTypeLabel(labelList);
@@ -117,10 +120,10 @@ private CombinedLabel getCombinedLabel(List> labelList) throws Configur
     try {
       combinedLabel = (CombinedLabel) combinedLabelBuilder.build("", labelList);
     } catch (LabelErrorException e) {
-      throw new ConfigurationException("Failed to build label", e);
+      throw new ConfigurationException(FAILED_TO_BUILD_LABEL.getErrorDesc(), e);
     }
     if (null == combinedLabel) {
-      throw new ConfigurationException("Failed to build label ,label is null");
+      throw new ConfigurationException(BUILD_LABEL_IS_NULL.getErrorDesc());
     }
     return combinedLabel;
   }
@@ -129,20 +132,21 @@ private CombinedLabel getCombinedLabel(List> labelList) throws Configur
   public List getConfigValue(String key, List> labelList)
       throws ConfigurationException {
     if (StringUtils.isBlank(key)) {
-      throw new ConfigurationException("configKey cannot be null");
+      throw new ConfigurationException(CONFIGKEY_CANNOT_BE_NULL.getErrorDesc());
     }
     LabelParameterParser.labelCheck(labelList);
-    List configKeys = configMapper.seleteKeyByKeyName(key);
+    List configKeys = configMapper.selectKeyByKeyName(key);
     if (null == configKeys || configKeys.isEmpty()) {
-      throw new ConfigurationException("config key not exists: " + key);
+      throw new ConfigurationException(CONFIG_KEY_NOT_EXISTS.getErrorDesc() + key);
     }
     CombinedLabel combinedLabel = getCombinedLabel(labelList);
     ConfigLabel configLabel =
         labelMapper.getLabelByKeyValue(combinedLabel.getLabelKey(), combinedLabel.getStringValue());
     if (null == configLabel || configLabel.getId() < 0) {
-      throw new ConfigurationException("label not exists: " + combinedLabel.getStringValue());
+      throw new ConfigurationException(
+          LABEL_NOT_EXISTS.getErrorDesc() + combinedLabel.getStringValue());
     }
     List configValues = new ArrayList<>();
     for (ConfigKey configKey : configKeys) {
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala
index 887ff3575b0..269438eca3d 100644
--- a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala
+++ b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala
@@ -203,7 +203,7 @@ class ConfigurationService extends Logging {
     if (setting.getId != null) {
       key = configMapper.selectKeyByKeyID(setting.getId)
     } else {
-      val keys = configMapper.seleteKeyByKeyName(setting.getKey)
+      val keys = configMapper.selectKeyByKeyName(setting.getKey)
       if (null != keys && !keys.isEmpty) {
         key = keys.get(0)
       }
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/util/LabelParameterParser.scala b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/util/LabelParameterParser.scala
index 2fb9ef50d06..d257579da70 100644
--- a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/util/LabelParameterParser.scala
+++ b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/util/LabelParameterParser.scala
@@ -18,6 +18,7 @@
 package org.apache.linkis.configuration.util

 import org.apache.linkis.common.conf.CommonVars
+import org.apache.linkis.configuration.errorcode.LinkisConfigurationErrorCodeSummary.THE_LABEL_PARAMETER_IS_EMPTY
 import org.apache.linkis.configuration.exception.ConfigurationException
 import org.apache.linkis.governance.common.conf.GovernanceCommonConf
 import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext
@@ -89,7 +90,7 @@ object LabelParameterParser {
       }
       true
     } else {
-      throw new ConfigurationException("The label parameter is empty")
+      throw new ConfigurationException(THE_LABEL_PARAMETER_IS_EMPTY.getErrorDesc())
     }
   }
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/validate/FloatValidator.scala b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/validate/FloatValidator.scala
index 15ee09eabe9..a256073ccb9 100644
--- a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/validate/FloatValidator.scala
+++ b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/validate/FloatValidator.scala
@@ -18,6 +18,7 @@
 package org.apache.linkis.configuration.validate

 import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.configuration.errorcode.LinkisConfigurationErrorCodeSummary.ERROR_VALIDATOR_RANGE
 import org.apache.linkis.configuration.exception.ConfigurationException

 import com.google.gson.GsonBuilder
@@ -28,7 +29,7 @@ class FloatValidator extends Validator with Logging {
     try {
       val rangArray = new GsonBuilder().create().fromJson(range, classOf[Array[Double]])
       if (rangArray.size != 2) {
-        throw new ConfigurationException("error validator range!")
+        throw new ConfigurationException(ERROR_VALIDATOR_RANGE.getErrorDesc)
       }
       value.toDouble >= rangArray.sorted.apply(0) && value.toDouble <= rangArray.sorted.apply(1)
     } catch {
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/validate/NumericalValidator.scala b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/validate/NumericalValidator.scala
index 87aa62f0948..696b6442a44 100644
--- a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/validate/NumericalValidator.scala
+++ b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/validate/NumericalValidator.scala
@@ -18,6 +18,7 @@
 package org.apache.linkis.configuration.validate

 import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.configuration.errorcode.LinkisConfigurationErrorCodeSummary.ERROR_VALIDATOR_RANGE
 import org.apache.linkis.configuration.exception.ConfigurationException

 import com.google.gson.GsonBuilder
@@ -29,7 +30,7 @@ class NumericalValidator extends Validator with Logging {
       val rangArray = new GsonBuilder().create().fromJson(range, classOf[Array[Int]])
       val valueInt = Integer.parseInt(value)
       if (rangArray.size != 2) {
-        throw new ConfigurationException("error validator range!")
+        throw new ConfigurationException(ERROR_VALIDATOR_RANGE.getErrorDesc)
       }
       valueInt >= rangArray.sorted.apply(0) && valueInt <= rangArray.sorted.apply(1)
     } {
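Note: `FloatValidator` and `NumericalValidator` both parse a two-element JSON range before comparing, and the malformed-range message now comes from `ERROR_VALIDATOR_RANGE`. A compact stand-alone sketch of that check (Gson and the two-element contract match the diff; the class and method names here are illustrative):

```java
import com.google.gson.GsonBuilder;

// Stand-alone sketch of the range check used by the validators above:
// the range is a JSON array, and anything but exactly two numbers is an error.
public class RangeValidatorDemo {

  static final String ERROR_VALIDATOR_RANGE = "error validator range!(错误验证器范围!)";

  static boolean validate(String value, String range) {
    double[] rangeArray = new GsonBuilder().create().fromJson(range, double[].class);
    if (rangeArray.length != 2) {
      // Mirrors ERROR_VALIDATOR_RANGE.getErrorDesc in the diff.
      throw new IllegalArgumentException(ERROR_VALIDATOR_RANGE);
    }
    double v = Double.parseDouble(value);
    // Sort the bounds, as the Scala code does with rangArray.sorted.
    double lo = Math.min(rangeArray[0], rangeArray[1]);
    double hi = Math.max(rangeArray[0], rangeArray[1]);
    return v >= lo && v <= hi;
  }

  public static void main(String[] args) {
    System.out.println(validate("2.5", "[1.0, 4.0]")); // true
    System.out.println(validate("9.9", "[1.0, 4.0]")); // false
  }
}
```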
diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java
index 62c8bb1e7a2..57cacd7a38b 100644
--- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java
+++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java
@@ -123,10 +123,10 @@ void testSelectKeyByKeyID() {
   }

   @Test
-  void testSeleteKeyByKeyName() {
+  void testSelectKeyByKeyName() {
     // TODO 查询结果转换异常
     //    ConfigKey configKey =
-    //        configMapper.seleteKeyByKeyName("wds.linkis.rm.yarnqueue");
+    //        configMapper.selectKeyByKeyName("wds.linkis.rm.yarnqueue");
     //    assertEquals("ide", configKey.getDefaultValue());
     //    System.out.println(configKey.getDefaultValue());
   }
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-client/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-client/pom.xml
index 0fb5299bec9..d41e93f1248 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-client/pom.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-client/pom.xml
@@ -21,7 +21,7 @@
     org.apache.linkis
     linkis
     1.3.0
-    ../../pom.xml
+    ../../../pom.xml

   linkis-cs-client
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-common/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-common/pom.xml
index 26357d2d93a..d590de40168 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-common/pom.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-common/pom.xml
@@ -21,7 +21,7 @@
     org.apache.linkis
     linkis
     1.3.0
-    ../../pom.xml
+    ../../../pom.xml

   linkis-cs-common
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-listener/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-listener/pom.xml
index b23451e7a85..c5cf139e9a7 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-listener/pom.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-listener/pom.xml
@@ -21,7 +21,7 @@
     org.apache.linkis
     linkis
     1.3.0
-    ../../pom.xml
+    ../../../pom.xml

   linkis-cs-listener
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/assembly/distribution.xml
index 4fc0ae61438..22638e59085 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/assembly/distribution.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/assembly/distribution.xml
@@ -15,11 +15,9 @@
   ~ See the License for the specific language governing permissions and
   ~ limitations under the License.
   -->
-
-
+
+
   linkis-cs-server
     dir
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/DefaultContextSearch.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/DefaultContextSearch.java
index abd95d3f726..1cda724dfca 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/DefaultContextSearch.java
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/DefaultContextSearch.java
@@ -34,6 +34,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import static org.apache.linkis.cs.errorcode.LinkisCsServerErrorCodeSummary.UNKNOWN_CONDITION_TYPE;
+
 public class DefaultContextSearch implements ContextSearch {

   private static Logger logger = LoggerFactory.getLogger(DefaultContextSearch.class);
@@ -84,6 +86,7 @@ private ConditionExecution getExecution(
       return new ContextValueTypeConditionExecution(
           (ContextValueTypeCondition) condition, contextCacheService, contextID);
     }
-    throw new ContextSearchFailedException(1200001, "Unknown Condition Type");
+    throw new ContextSearchFailedException(
+        UNKNOWN_CONDITION_TYPE.getErrorCode(), UNKNOWN_CONDITION_TYPE.getErrorDesc());
   }
 }
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/errorcode/LinkisCsServerErrorCodeSummary.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/errorcode/LinkisCsServerErrorCodeSummary.java
new file mode 100644
index 00000000000..1f7b5fee653
--- /dev/null
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/errorcode/LinkisCsServerErrorCodeSummary.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.cs.errorcode;
+
+public enum LinkisCsServerErrorCodeSummary {
+  UNKNOWN_CONDITION_TYPE(
+      1200001, "Unknown Condition Type(未知条件类型)", "Unknown Condition Type(未知条件类型)");
+
+  /** (errorCode)错误码 */
+  private int errorCode;
+  /** (errorDesc)错误描述 */
+  private String errorDesc;
+  /** Possible reasons for the error(错误可能出现的原因) */
+  private String comment;
+
+  LinkisCsServerErrorCodeSummary(int errorCode, String errorDesc, String comment) {
+    this.errorCode = errorCode;
+    this.errorDesc = errorDesc;
+    this.comment = comment;
+  }
+
+  public int getErrorCode() {
+    return errorCode;
+  }
+
+  public void setErrorCode(int errorCode) {
+    this.errorCode = errorCode;
+  }
+
+  public String getErrorDesc() {
+    return errorDesc;
+  }
+
+  public void setErrorDesc(String errorDesc) {
+    this.errorDesc = errorDesc;
+  }
+
+  public String getComment() {
+    return comment;
+  }
+
+  public void setComment(String comment) {
+    this.comment = comment;
+  }
+
+  @Override
+  public String toString() {
+    return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc;
+  }
+}
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextHistoryMapper.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextHistoryMapper.xml
index 08f2ac1d3d2..b29902b87ff 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextHistoryMapper.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextHistoryMapper.xml
@@ -55,9 +55,9 @@
         `source` = #{pHistory.source},
         `context_type` = #{pHistory.contextType},
         `keyword` = #{pHistory.keyword},
-        `update_time` = #{updateTime},
-        `create_time` = #{createTime},
-        `access_time` = #{accessTime},
+        `update_time` = #{pHistory.updateTime},
+        `create_time` = #{pHistory.createTime},
+        `access_time` = #{pHistory.accessTime},
         WHERE id = #{pHistory.id}
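Note: the mapper fixes above (and the two listener-mapper fixes that follow) all repair the same bug: when a MyBatis statement takes a named parameter such as `@Param("pHistory")`, its fields must be referenced through that prefix (`#{pHistory.updateTime}`), or binding fails at runtime. A minimal sketch of the rule, with hypothetical names:

```java
import java.util.Date;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Update;

// Minimal sketch of the parameter-prefix rule behind the mapper XML fixes.
// History is a hypothetical entity; Linkis uses PersistenceContextHistory.
public interface HistoryMapperSketch {

  class History {
    private Long id;
    private Date updateTime;

    public Long getId() { return id; }

    public Date getUpdateTime() { return updateTime; }
  }

  // Wrong: #{updateTime} resolves against nothing, because the only binding
  // MyBatis knows about here is "pHistory"; this fails with a BindingException.
  // @Update("update history set update_time = #{updateTime} where id = #{pHistory.id}")

  // Right: every field is resolved through the @Param name.
  @Update("update history set update_time = #{pHistory.updateTime} where id = #{pHistory.id}")
  int updateHistory(@Param("pHistory") History pHistory);
}
```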
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextIDListenerMapper.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextIDListenerMapper.xml
index 9a888dc9610..33b8781e938 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextIDListenerMapper.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextIDListenerMapper.xml
@@ -27,7 +27,7 @@
         INSERT INTO linkis_ps_cs_context_listener()
-        VALUES (#{listener.id},#{listener.source},#{listener.contextId},#{createTime},#{updateTime},#{accessTime})
+        VALUES (#{listener.id},#{listener.source},#{listener.contextId},#{listener.createTime},#{listener.updateTime},#{listener.accessTime})
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextIDMapper.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextIDMapper.xml
index 38c3c721678..67f8e894724 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextIDMapper.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextIDMapper.xml
@@ -87,4 +87,19 @@
         WHERE id = #{contextId}

+
+
\ No newline at end of file
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextKeyListenerMapper.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextKeyListenerMapper.xml
index 310709f638f..8c38e4a162e 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextKeyListenerMapper.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/persistence/dao/impl/contextKeyListenerMapper.xml
@@ -27,7 +27,7 @@
         INSERT INTO linkis_ps_cs_context_map_listener()
-        VALUES (#{listener.id},#{listener.source},#{listener.keyId},#{createTime},#{updateTime},#{accessTime})
+        VALUES (#{listener.id},#{listener.source},#{listener.keyId},#{listener.createTime},#{listener.updateTime},#{listener.accessTime})

@@ -41,7 +41,7 @@

-
+
         SELECT * FROM linkis_ps_cs_context_map_listener WHERE
         #{keyid}
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/scala/org/apache/linkis/cs/highavailable/ha/instancealias/impl/DefaultInstanceAliasConverter.scala b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/scala/org/apache/linkis/cs/highavailable/ha/instancealias/impl/DefaultInstanceAliasConverter.scala
index b9277900dfe..f5f54efa8dc 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/scala/org/apache/linkis/cs/highavailable/ha/instancealias/impl/DefaultInstanceAliasConverter.scala
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/scala/org/apache/linkis/cs/highavailable/ha/instancealias/impl/DefaultInstanceAliasConverter.scala
@@ -21,12 +21,10 @@ import org.apache.linkis.cs.highavailable.ha.instancealias.InstanceAliasConverte

 import org.apache.commons.lang3.StringUtils

-import org.springframework.stereotype.Component
-
 import java.util.Base64
 import java.util.regex.Pattern

-//@Component
+// @Component
 class DefaultInstanceAliasConverter extends InstanceAliasConverter {

   val pattern = Pattern.compile("[a-zA-Z\\d=\\+/]+")
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/scala/org/apache/linkis/cs/server/scheduler/impl/CsExecutor.scala b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/scala/org/apache/linkis/cs/server/scheduler/impl/CsExecutor.scala
index 0fdefbcaf0e..80f9d6f67d9 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/scala/org/apache/linkis/cs/server/scheduler/impl/CsExecutor.scala
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/scala/org/apache/linkis/cs/server/scheduler/impl/CsExecutor.scala
@@ -43,7 +43,7 @@ class CsExecutor extends Executor {

   override def state: ExecutorState.ExecutorState = this._state

-  override def getExecutorInfo = new ExecutorInfo(id, state)
+  override def getExecutorInfo: ExecutorInfo = new ExecutorInfo(id, state)

   @throws[IOException]
   override def close(): Unit = {}
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/Scan.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/Scan.java
new file mode 100644
index 00000000000..df9fb2621ff
--- /dev/null
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/Scan.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.cs;
+
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+
+import org.mybatis.spring.annotation.MapperScan;
+
+@EnableAutoConfiguration
+@MapperScan("org.apache.linkis.cs.persistence.dao")
+public class Scan {}
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/WebApplicationServer.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/WebApplicationServer.java
new file mode 100644
index 00000000000..b4e0d1e9fc6
--- /dev/null
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/WebApplicationServer.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.cs;
+
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.boot.builder.SpringApplicationBuilder;
+import org.springframework.boot.web.servlet.ServletComponentScan;
+import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
+import org.springframework.context.annotation.ComponentScan;
+
+@EnableAutoConfiguration
+@ServletComponentScan
+@ComponentScan
+public class WebApplicationServer extends SpringBootServletInitializer {
+
+  public static void main(String[] args) {
+    new SpringApplicationBuilder(WebApplicationServer.class).run(args);
+  }
+}
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/errorcode/LinkisCsServerErrorCodeSummaryTest.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/errorcode/LinkisCsServerErrorCodeSummaryTest.java
new file mode 100644
index 00000000000..5ee8ba98c0e
--- /dev/null
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/errorcode/LinkisCsServerErrorCodeSummaryTest.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.cs.errorcode;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+public class LinkisCsServerErrorCodeSummaryTest {
+
+  @Test
+  @DisplayName("enumTest")
+  public void enumTest() {
+
+    int errorCode = LinkisCsServerErrorCodeSummary.UNKNOWN_CONDITION_TYPE.getErrorCode();
+    String errorDesc = LinkisCsServerErrorCodeSummary.UNKNOWN_CONDITION_TYPE.getErrorDesc();
+    String comment = LinkisCsServerErrorCodeSummary.UNKNOWN_CONDITION_TYPE.getComment();
+
+    Assertions.assertTrue(errorCode == 1200001);
+    Assertions.assertNotNull(errorDesc);
+    Assertions.assertNotNull(comment);
+  }
+}
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/exception/ContextSearchFailedExceptionTest.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/exception/ContextSearchFailedExceptionTest.java
new file mode 100644
index 00000000000..75c7aaf0132
--- /dev/null
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/exception/ContextSearchFailedExceptionTest.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.cs.exception;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+public class ContextSearchFailedExceptionTest {
+
+  @Test
+  @DisplayName("exceptionTest")
+  public void exceptionTest() {
+
+    int errorCode = 1200001;
+    String errorDesc = "Unknown Condition Type(未知条件类型)";
+    ContextSearchFailedException exception = new ContextSearchFailedException(errorCode, errorDesc);
+
+    Assertions.assertTrue(errorCode == exception.getErrCode());
+    Assertions.assertEquals(errorDesc, exception.getDesc());
+  }
+}
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/conf/PersistenceConfTest.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/conf/PersistenceConfTest.java
new file mode 100644
index 00000000000..dc1e8b93f80
--- /dev/null
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/conf/PersistenceConfTest.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.cs.persistence.conf;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+public class PersistenceConfTest {
+
+  @Test
+  @DisplayName("constTest")
+  public void constTest() {
+
+    String tuningClassValue = PersistenceConf.TUNING_CLASS.getValue();
+    String tuningMethodValue = PersistenceConf.TUNING_METHOD.getValue();
+    Boolean packageHeaderValue =
+        PersistenceConf.ENABLE_CS_DESERIALIZE_REPLACE_PACKAGE_HEADER.getValue();
+    String csDeserializeReplacePackageHeaderValue =
+        PersistenceConf.CS_DESERIALIZE_REPLACE_PACKAGE_HEADER.getValue();
+    String csidPackageHeader = PersistenceConf.CSID_PACKAGE_HEADER;
+
+    Assertions.assertNotNull(tuningClassValue);
+    Assertions.assertNotNull(tuningMethodValue);
+    Assertions.assertTrue(packageHeaderValue.booleanValue());
+    Assertions.assertNotNull(csDeserializeReplacePackageHeaderValue);
+    Assertions.assertNotNull(csidPackageHeader);
+  }
+}
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/BaseDaoTest.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/BaseDaoTest.java
new file mode 100644
index 00000000000..1080bdcee91
--- /dev/null
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/BaseDaoTest.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.cs.persistence.dao;
+
+import org.apache.linkis.cs.Scan;
+
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.annotation.Rollback;
+import org.springframework.transaction.annotation.EnableTransactionManagement;
+import org.springframework.transaction.annotation.Transactional;
+
+@SpringBootTest(classes = Scan.class)
+@Transactional
+@Rollback(true)
+@EnableTransactionManagement
+public abstract class BaseDaoTest {}
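Note: `BaseDaoTest` gives every mapper test a Spring Boot context in which each `@Test` method runs inside a transaction that is rolled back afterwards, so seeded rows never leak between tests. A sketch of the idea with hypothetical names (`DemoScan`, `DemoBaseDaoTest`):

```java
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.annotation.Rollback;
import org.springframework.transaction.annotation.Transactional;

// Hypothetical stand-in for org.apache.linkis.cs.Scan: it only has to give
// Spring Boot an auto-configuration entry point to build the test context.
@EnableAutoConfiguration
class DemoScan {}

// Mirrors BaseDaoTest above: every subclass inherits the transactional
// boundary and automatic rollback, so no per-test cleanup code is needed.
@SpringBootTest(classes = DemoScan.class)
@Transactional
@Rollback
abstract class DemoBaseDaoTest {}

// A concrete DAO test simply extends the base class:
// class DemoMapperTest extends DemoBaseDaoTest { ... }
```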
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cs.persistence.dao; +
+import org.apache.linkis.cs.common.entity.enumeration.ContextType; +import org.apache.linkis.cs.common.entity.source.CommonContextID; +import org.apache.linkis.cs.common.entity.source.ContextID; +import org.apache.linkis.cs.persistence.entity.PersistenceContextHistory; + +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.Date; +import java.util.List; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +
+public class ContextHistoryMapperTest extends BaseDaoTest { + + private static final Logger LOG = LoggerFactory.getLogger(ContextHistoryMapperTest.class); + + @Autowired private ContextHistoryMapper contextHistoryMapper; +
+ private PersistenceContextHistory createPersistenceContextHistory() { + PersistenceContextHistory history = new PersistenceContextHistory(); + history.setAccessTime(new Date()); + history.setContextId("1"); + history.setContextType(ContextType.UDF); + history.setCreateTime(new Date()); + history.setId(3); + history.setUpdateTime(new Date()); + history.setKeyword("success"); + history.setHistoryJson("{}"); + history.setSource("source"); + return history; + } +
+ @Test + @DisplayName("createHistoryTest") + public void createHistoryTest() { + ContextID contextID = new CommonContextID(); + contextID.setContextId("1"); + PersistenceContextHistory history = createPersistenceContextHistory(); + contextHistoryMapper.createHistory(history); + PersistenceContextHistory contextHistory = contextHistoryMapper.getHistory(contextID, 3L); + Assertions.assertNotNull(contextHistory); + } +
+ @Test + @DisplayName("getHistoryBySourceTest") + public void getHistoryBySourceTest() { + + ContextID contextID = new CommonContextID(); + contextID.setContextId("1"); + PersistenceContextHistory history = createPersistenceContextHistory(); + contextHistoryMapper.createHistory(history); + PersistenceContextHistory contextHistory = + contextHistoryMapper.getHistoryBySource(contextID, "source"); + Assertions.assertNotNull(contextHistory); + } +
+ @Test + @DisplayName("getHistoriesByContextIDTest") + public void getHistoriesByContextIDTest() { + + ContextID contextID = new CommonContextID(); + contextID.setContextId("1"); + + PersistenceContextHistory history = createPersistenceContextHistory(); + contextHistoryMapper.createHistory(history); + + List<PersistenceContextHistory> histories = + contextHistoryMapper.getHistoriesByContextID(contextID); + Assertions.assertTrue(histories.size() > 0); + } +
+ @Test + @DisplayName("removeHistoryTest") + public void removeHistoryTest() { + + ContextID contextID = new CommonContextID(); + contextID.setContextId("1"); + + PersistenceContextHistory history = createPersistenceContextHistory(); +
contextHistoryMapper.createHistory(history); + contextHistoryMapper.removeHistory(contextID, "source"); + + List<PersistenceContextHistory> histories = + contextHistoryMapper.getHistoriesByContextID(contextID); + Assertions.assertTrue(histories.size() == 0); + } +
+ @Test + @DisplayName("updateHistoryTest") + public void updateHistoryTest() { + String keyWord = "Test it."; + ContextID contextID = new CommonContextID(); + contextID.setContextId("1"); + + PersistenceContextHistory history = createPersistenceContextHistory(); + contextHistoryMapper.createHistory(history); + history.setKeyword(keyWord); + contextHistoryMapper.updateHistory(contextID, history); + + PersistenceContextHistory contextHistory = contextHistoryMapper.getHistory(contextID, 3L); + Assertions.assertEquals(keyWord, contextHistory.getKeyword()); + } +
+ @Test + @DisplayName("searchByKeywordsTest") + public void searchByKeywordsTest() { + String[] keyWords = {"success"}; + + ContextID contextID = new CommonContextID(); + contextID.setContextId("1"); + + PersistenceContextHistory history = createPersistenceContextHistory(); + contextHistoryMapper.createHistory(history); + + List<PersistenceContextHistory> histories = + contextHistoryMapper.searchByKeywords(contextID, keyWords); + Assertions.assertTrue(histories.size() > 0); + } +
+ @Test + @DisplayName("searchByKeywordsAndTypeTest") + public void searchByKeywordsAndTypeTest() { + String[] keyWords = {"success"}; + + ContextID contextID = new CommonContextID(); + contextID.setContextId("1"); + + PersistenceContextHistory history = createPersistenceContextHistory(); + contextHistoryMapper.createHistory(history); + List<PersistenceContextHistory> histories = + contextHistoryMapper.searchByKeywordsAndType(ContextType.UDF, keyWords); + Assertions.assertTrue(histories.size() > 0); + } +}
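The mapper tests above all share one shape: build a fixture with the private factory, write it through the mapper, read it back, and assert, while BaseDaoTest's @Transactional/@Rollback keeps the in-memory H2 schema clean between methods. A minimal sketch of an additional test in the same style, assuming only the mapper API already used above (the class name and keyword value are hypothetical, not part of this patch):

```java
package org.apache.linkis.cs.persistence.dao;

import org.apache.linkis.cs.common.entity.source.CommonContextID;
import org.apache.linkis.cs.common.entity.source.ContextID;
import org.apache.linkis.cs.persistence.entity.PersistenceContextHistory;

import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

// Hypothetical sketch, not part of the patch: a keyword round-trip test
// that leans on BaseDaoTest for the H2 schema and per-test rollback.
public class ContextHistoryKeywordRoundTripTest extends BaseDaoTest {

  @Autowired private ContextHistoryMapper contextHistoryMapper;

  @Test
  public void keywordRoundTripTest() {
    ContextID contextID = new CommonContextID();
    contextID.setContextId("1");

    PersistenceContextHistory history = new PersistenceContextHistory();
    history.setContextId("1");
    history.setKeyword("roundtrip"); // hypothetical keyword
    contextHistoryMapper.createHistory(history);

    // The row written above must come back for its own keyword.
    List<PersistenceContextHistory> histories =
        contextHistoryMapper.searchByKeywords(contextID, new String[] {"roundtrip"});
    Assertions.assertFalse(histories.isEmpty());
  }
}
```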
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextIDListenerMapperTest.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextIDListenerMapperTest.java new file mode 100644 index 00000000000..313f51f34f4 --- /dev/null +++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextIDListenerMapperTest.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cs.persistence.dao; +
+import org.apache.linkis.cs.common.entity.source.CommonContextID; +import org.apache.linkis.cs.common.entity.source.ContextID; +import org.apache.linkis.cs.persistence.entity.PersistenceContextIDListener; + +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.Date; +import java.util.List; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +
+public class ContextIDListenerMapperTest extends BaseDaoTest { + + @Autowired private ContextIDListenerMapper contextIDListenerMapper; +
+ private PersistenceContextIDListener createPersistenceContextIDListener() { + + PersistenceContextIDListener persistenceContextIDListener = new PersistenceContextIDListener(); + persistenceContextIDListener.setAccessTime(new Date()); + persistenceContextIDListener.setContextId("1"); + persistenceContextIDListener.setCreateTime(new Date()); + persistenceContextIDListener.setId(3); + persistenceContextIDListener.setUpdateTime(new Date()); + persistenceContextIDListener.setSource("source"); + return persistenceContextIDListener; + } +
+ @Test + @DisplayName("createIDListenerTest") + public void createIDListenerTest() { + + ContextID contextID = new CommonContextID(); + contextID.setContextId("1"); + + PersistenceContextIDListener listener = createPersistenceContextIDListener(); + contextIDListenerMapper.createIDListener(listener); + + List<PersistenceContextIDListener> all = contextIDListenerMapper.getAll(contextID); + Assertions.assertTrue(all.size() > 0); + } +
+ @Test + @DisplayName("removeTest") + public void removeTest() { + + ContextID contextID = new CommonContextID(); + contextID.setContextId("1"); + + PersistenceContextIDListener listener = createPersistenceContextIDListener(); + contextIDListenerMapper.createIDListener(listener); + contextIDListenerMapper.remove(listener); + List<PersistenceContextIDListener> all = contextIDListenerMapper.getAll(contextID); + Assertions.assertTrue(all.size() == 0); + } +
+ @Test + @DisplayName("removeAllTest") + public void removeAllTest() { + + ContextID contextID = new CommonContextID(); + contextID.setContextId("1"); + + PersistenceContextIDListener listener = createPersistenceContextIDListener(); + contextIDListenerMapper.createIDListener(listener); + contextIDListenerMapper.removeAll(contextID); + List<PersistenceContextIDListener> all = contextIDListenerMapper.getAll(contextID); + Assertions.assertTrue(all.size() == 0); + } +}
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextIDMapperTest.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextIDMapperTest.java new file mode 100644 index 00000000000..eb23c4398ef --- /dev/null +++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextIDMapperTest.java @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cs.persistence.dao; +
+import org.apache.linkis.cs.persistence.entity.PersistenceContextID; + +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.Date; +import java.util.List; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +
+public class ContextIDMapperTest extends BaseDaoTest { + + @Autowired private ContextIDMapper contextIDMapper; +
+ private PersistenceContextID createPersistenceContextID() { + + PersistenceContextID persistenceContextID = new PersistenceContextID(); + persistenceContextID.setAccessTime(new Date()); + persistenceContextID.setCreateTime(new Date()); + persistenceContextID.setUpdateTime(new Date()); + persistenceContextID.setApplication("application-1"); + persistenceContextID.setBackupInstance("1"); + persistenceContextID.setContextId("1"); + persistenceContextID.setExpireTime(new Date()); + persistenceContextID.setSource("source"); + persistenceContextID.setUser("hadoop"); + persistenceContextID.setInstance("1"); + return persistenceContextID; + } +
+ @Test + @DisplayName("createContextIDTest") + public void createContextIDTest() { + + PersistenceContextID contextID = createPersistenceContextID(); + contextIDMapper.createContextID(contextID); + PersistenceContextID persistenceContextID = contextIDMapper.getContextID("1"); + Assertions.assertNotNull(persistenceContextID); + } +
+ @Test + @DisplayName("deleteContextIDTest") + public void deleteContextIDTest() { + + PersistenceContextID contextID = createPersistenceContextID(); + contextIDMapper.createContextID(contextID); + contextIDMapper.deleteContextID("1"); + PersistenceContextID persistenceContextID = contextIDMapper.getContextID("1"); + Assertions.assertNull(persistenceContextID); + } +
+ @Test + @DisplayName("updateContextIDTest") + public void updateContextIDTest() { + String newName = "hadoops"; + PersistenceContextID contextID = createPersistenceContextID(); + contextIDMapper.createContextID(contextID); + contextID.setUser(newName); + contextIDMapper.updateContextID(contextID); + PersistenceContextID persistenceContextID = contextIDMapper.getContextID("1"); + Assertions.assertEquals(newName, persistenceContextID.getUser()); + } +
+ @Test + @DisplayName("searchContextIDTest") + public void searchContextIDTest() { + + PersistenceContextID contextID = createPersistenceContextID(); + contextIDMapper.createContextID(contextID); + List<PersistenceContextID> contextIDS = contextIDMapper.searchContextID(contextID); + Assertions.assertNotNull(contextIDS); + } +
+ @Test + @DisplayName("getAllContextIDByTimeTest") + public void getAllContextIDByTimeTest() { + + PersistenceContextID contextID = createPersistenceContextID(); + contextIDMapper.createContextID(contextID); + List<PersistenceContextID> contextIDS = + contextIDMapper.getAllContextIDByTime(null, null, null, null, null, null); + Assertions.assertTrue(contextIDS.size() > 0); + } +}
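getAllContextIDByTimeTest above passes six nulls, i.e. an unbounded scan. For the bounded case, a minimal sketch, assuming the six parameters are the start/end pairs for create, update and access time (an inference from the entity's three timestamp columns, not something this patch states):

```java
// Hypothetical sketch: fetch context IDs accessed in the last 24 hours.
// Parameter order (createTimeStart, createTimeEnd, updateTimeStart,
// updateTimeEnd, accessTimeStart, accessTimeEnd) is an assumption.
Date accessStart = new Date(System.currentTimeMillis() - 24L * 60 * 60 * 1000);
Date accessEnd = new Date();
List<PersistenceContextID> recentlyAccessed =
    contextIDMapper.getAllContextIDByTime(null, null, null, null, accessStart, accessEnd);
Assertions.assertNotNull(recentlyAccessed);
```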
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextKeyListenerMapperTest.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextKeyListenerMapperTest.java new file mode 100644 index 00000000000..7d1812e5179 --- /dev/null +++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextKeyListenerMapperTest.java @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cs.persistence.dao; +
+import org.apache.linkis.cs.common.entity.listener.CommonContextKeyListenerDomain; +import org.apache.linkis.cs.common.entity.listener.ContextKeyListenerDomain; +import org.apache.linkis.cs.persistence.entity.PersistenceContextKeyListener; + +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +
+public class ContextKeyListenerMapperTest extends BaseDaoTest { + + @Autowired private ContextKeyListenerMapper contextKeyListenerMapper; +
+ private PersistenceContextKeyListener createPersistenceContextKeyListener() { + + PersistenceContextKeyListener persistenceContextKeyListener = + new PersistenceContextKeyListener(); + persistenceContextKeyListener.setCreateTime(new Date()); + persistenceContextKeyListener.setUpdateTime(new Date()); + persistenceContextKeyListener.setKeyId(1); + persistenceContextKeyListener.setId(1); + persistenceContextKeyListener.setSource("source"); + + return persistenceContextKeyListener; + } +
+ @Test + @DisplayName("createKeyListenerTest") + public void createKeyListenerTest() { + List<Integer> keyIds = new ArrayList<>(); + keyIds.add(1); + PersistenceContextKeyListener listener = createPersistenceContextKeyListener(); + contextKeyListenerMapper.createKeyListener(listener); + List<PersistenceContextKeyListener> all = contextKeyListenerMapper.getAll(keyIds); + Assertions.assertTrue(all.size() > 0); + } +
+ @Test + @DisplayName("removeTest") + public void removeTest() { + List<Integer> keyIds = new ArrayList<>(); + keyIds.add(1); + + ContextKeyListenerDomain contextKeyListenerDomain = new CommonContextKeyListenerDomain(); + contextKeyListenerDomain.setSource("source"); + + PersistenceContextKeyListener listener = createPersistenceContextKeyListener(); + contextKeyListenerMapper.createKeyListener(listener); + + contextKeyListenerMapper.remove(contextKeyListenerDomain, 1); + List<PersistenceContextKeyListener> all = contextKeyListenerMapper.getAll(keyIds); + Assertions.assertTrue(all.size() == 0); + } +
+ @Test + @DisplayName("removeAllTest") + public void removeAllTest() { + + List<Integer> keyIds = new ArrayList<>(); + keyIds.add(1); + + ContextKeyListenerDomain contextKeyListenerDomain = new
CommonContextKeyListenerDomain(); + contextKeyListenerDomain.setSource("source"); + + PersistenceContextKeyListener listener = createPersistenceContextKeyListener(); + contextKeyListenerMapper.createKeyListener(listener); + + contextKeyListenerMapper.removeAll(keyIds); + List<PersistenceContextKeyListener> all = contextKeyListenerMapper.getAll(keyIds); + Assertions.assertTrue(all.size() == 0); + } +
+ @Test + @DisplayName("getAllTest") + public void getAllTest() { + + List<Integer> keyIds = new ArrayList<>(); + keyIds.add(1); + + ContextKeyListenerDomain contextKeyListenerDomain = new CommonContextKeyListenerDomain(); + contextKeyListenerDomain.setSource("source"); + + PersistenceContextKeyListener listener = createPersistenceContextKeyListener(); + contextKeyListenerMapper.createKeyListener(listener); + List<PersistenceContextKeyListener> all = contextKeyListenerMapper.getAll(keyIds); + Assertions.assertTrue(all.size() > 0); + } +}
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/utils/PersistenceUtilsTest.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/utils/PersistenceUtilsTest.java new file mode 100644 index 00000000000..6e86288174b --- /dev/null +++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/utils/PersistenceUtilsTest.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cs.persistence.utils; +
+import org.apache.linkis.cs.persistence.util.PersistenceUtils; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +
+public class PersistenceUtilsTest { + + @Test + @DisplayName("generateSetMethodTest") + public void generateSetMethodTest() { + + String fieldName = "generateSetMethodTest"; + String setMethod = PersistenceUtils.generateSetMethod(fieldName); + Assertions.assertNotNull(setMethod); + } +}
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/server/conf/ContextServerConfTest.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/server/conf/ContextServerConfTest.java new file mode 100644 index 00000000000..e5ce03b7dca --- /dev/null +++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/server/conf/ContextServerConfTest.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cs.server.conf; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class ContextServerConfTest { + + @Test + @DisplayName("constTest") + public void constTest() { + + String keywordScanPackage = ContextServerConf.KEYWORD_SCAN_PACKAGE; + int csSchedulerMaxRunningJobs = ContextServerConf.CS_SCHEDULER_MAX_RUNNING_JOBS; + long csSchedulerMaxAskExecutorTimes = ContextServerConf.CS_SCHEDULER_MAX_ASK_EXECUTOR_TIMES; + long csSchedulerJobWaitMills = ContextServerConf.CS_SCHEDULER_JOB_WAIT_MILLS; + + Assertions.assertNotNull(keywordScanPackage); + Assertions.assertTrue(100 == csSchedulerMaxRunningJobs); + Assertions.assertTrue(1000 == csSchedulerMaxAskExecutorTimes); + Assertions.assertTrue(10000 == csSchedulerJobWaitMills); + } +} diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/resources/application.properties b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/resources/application.properties new file mode 100644 index 00000000000..24a4cff1cb4 --- /dev/null +++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/resources/application.properties @@ -0,0 +1,59 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +#wds.linkis.test.mode=true +wds.linkis.server.version=v1 + +#test +wds.linkis.test.mode=true +wds.linkis.test.user=hadoop + + +##Linkis governance station administrators +wds.linkis.governance.station.admin=hadoop +wds.linkis.gateway.conf.publicservice.list=query,jobhistory,application,configuration,filesystem,udf,variable,microservice,errorcode,bml,datasource +# + +#logging.level.root=debug +#logging.file=./test.log +#debug=true + +spring.datasource.driver-class-name=org.h2.Driver +spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript from 'classpath:create.sql' +spring.datasource.username=sa +spring.datasource.password= +spring.datasource.hikari.connection-test-query=select 1 +spring.datasource.hikari.minimum-idle=5 +spring.datasource.hikari.auto-commit=true +spring.datasource.hikari.validation-timeout=3000 +spring.datasource.hikari.pool-name=linkis-test +spring.datasource.hikari.maximum-pool-size=50 +spring.datasource.hikari.connection-timeout=30000 +spring.datasource.hikari.idle-timeout=600000 +spring.datasource.hikari.leak-detection-threshold=0 +spring.datasource.hikari.initialization-fail-timeout=1 + +spring.main.web-application-type=servlet +server.port=1234 +spring.h2.console.enabled=true + +#disable eureka discovery client +spring.cloud.service-registry.auto-registration.enabled=false +eureka.client.enabled=false +eureka.client.serviceUrl.registerWithEureka=false + +mybatis-plus.mapper-locations=classpath:org/apache/linkis/cs/persistence/dao/impl/*.xml +mybatis-plus.type-aliases-package=org.apache.linkis.cs.persistence.entity +mybatis-plus.configuration.log-impl=org.apache.ibatis.logging.stdout.StdOutImpl diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/resources/create.sql b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/resources/create.sql new file mode 100644 index 00000000000..33956b3fb5a --- /dev/null +++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/resources/create.sql @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ + +SET FOREIGN_KEY_CHECKS=0; +SET REFERENTIAL_INTEGRITY FALSE; + +DROP TABLE IF EXISTS linkis_ps_cs_context_history CASCADE; +CREATE TABLE linkis_ps_cs_context_history ( + id int(11) AUTO_INCREMENT, + context_id int(11) DEFAULT NULL, + source text, + context_type varchar(32) DEFAULT NULL, + history_json text, + keyword varchar(255) DEFAULT NULL, + update_time datetime DEFAULT CURRENT_TIMESTAMP, + create_time datetime DEFAULT CURRENT_TIMESTAMP, + access_time datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (id) +) ; + + +DROP TABLE IF EXISTS linkis_ps_cs_context_listener CASCADE; +CREATE TABLE linkis_ps_cs_context_listener ( + id int(11) AUTO_INCREMENT, + listener_source varchar(255) DEFAULT NULL, + context_id int(11) DEFAULT NULL, + update_time datetime DEFAULT CURRENT_TIMESTAMP, + create_time datetime DEFAULT CURRENT_TIMESTAMP, + access_time datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (id) +) ; + +DROP TABLE IF EXISTS linkis_ps_cs_context_id CASCADE; +CREATE TABLE linkis_ps_cs_context_id ( + id int(11) AUTO_INCREMENT, + user varchar(32) DEFAULT NULL, + application varchar(32) DEFAULT NULL, + source varchar(255) DEFAULT NULL, + expire_type varchar(32) DEFAULT NULL, + expire_time datetime DEFAULT NULL, + instance varchar(128) DEFAULT NULL, + backup_instance varchar(255) DEFAULT NULL, + update_time datetime DEFAULT CURRENT_TIMESTAMP, + create_time datetime DEFAULT CURRENT_TIMESTAMP, + access_time datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (id) +) ; + +DROP TABLE IF EXISTS linkis_ps_cs_context_map_listener CASCADE; +CREATE TABLE linkis_ps_cs_context_map_listener ( + id int(11) AUTO_INCREMENT, + listener_source varchar(255) DEFAULT NULL, + key_id int(11) DEFAULT NULL, + update_time datetime DEFAULT CURRENT_TIMESTAMP, + create_time datetime DEFAULT CURRENT_TIMESTAMP, + access_time datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (id) +) ; \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-client/src/test/java/org/apache/linkis/datasource/client/config/DatasourceClientConfigTest.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-client/src/test/java/org/apache/linkis/datasource/client/config/DatasourceClientConfigTest.java new file mode 100644 index 00000000000..40db9eabfbe --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-client/src/test/java/org/apache/linkis/datasource/client/config/DatasourceClientConfigTest.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.datasource.client.config; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class DatasourceClientConfigTest { + + @Test + @DisplayName("constTest") + public void constTest() { + + String metaDataService = DatasourceClientConfig.METADATA_SERVICE_MODULE().getValue(); + String dataSourceService = DatasourceClientConfig.DATA_SOURCE_SERVICE_MODULE().getValue(); + String authTokenKey = DatasourceClientConfig.AUTH_TOKEN_KEY().getValue(); + String authTokenValue = DatasourceClientConfig.AUTH_TOKEN_VALUE().getValue(); + String dataSourceClientName = + DatasourceClientConfig.DATA_SOURCE_SERVICE_CLIENT_NAME().getValue(); + Object connectionMaxSize = DatasourceClientConfig.CONNECTION_MAX_SIZE().getValue(); + Object connectionTimeOut = DatasourceClientConfig.CONNECTION_TIMEOUT().getValue(); + Object connectionReadTimeOut = DatasourceClientConfig.CONNECTION_READ_TIMEOUT().getValue(); + + Assertions.assertNotNull(metaDataService); + Assertions.assertNotNull(dataSourceService); + Assertions.assertNotNull(authTokenKey); + Assertions.assertNotNull(authTokenValue); + Assertions.assertNotNull(dataSourceClientName); + Assertions.assertNotNull(connectionMaxSize); + Assertions.assertNotNull(connectionTimeOut); + Assertions.assertNotNull(connectionReadTimeOut); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-client/src/test/java/org/apache/linkis/datasource/client/exception/DataSourceClientBuilderExceptionTest.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-client/src/test/java/org/apache/linkis/datasource/client/exception/DataSourceClientBuilderExceptionTest.java new file mode 100644 index 00000000000..40c31beb492 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-client/src/test/java/org/apache/linkis/datasource/client/exception/DataSourceClientBuilderExceptionTest.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.datasource.client.exception; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class DataSourceClientBuilderExceptionTest { + + @Test + @DisplayName("exceptionTest") + public void exceptionTest() { + + String errorDesc = "this is error"; + DataSourceClientBuilderException exception = new DataSourceClientBuilderException(errorDesc); + Assertions.assertEquals(errorDesc, exception.getDesc()); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-client/src/test/java/org/apache/linkis/datasource/client/imp/LinkisDataSourceRemoteClientTest.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-client/src/test/java/org/apache/linkis/datasource/client/imp/LinkisDataSourceRemoteClientTest.java new file mode 100644 index 00000000000..779c4b99d38 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-client/src/test/java/org/apache/linkis/datasource/client/imp/LinkisDataSourceRemoteClientTest.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.datasource.client.imp; +
+import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient; +import org.apache.linkis.httpclient.dws.config.DWSClientConfig; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +
+public class LinkisDataSourceRemoteClientTest { + + @Test + @DisplayName("createClientConfigTest") + public void createClientConfigTest() { + + LinkisDataSourceRemoteClient client = new LinkisDataSourceRemoteClient(); + DWSClientConfig clientConfig = client.createClientConfig(); + Assertions.assertNotNull(clientConfig); + } +}
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/pom.xml index 9e76a59ca69..ca2faad272f 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/pom.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/pom.xml @@ -21,6 +21,7 @@ <groupId>org.apache.linkis</groupId> <artifactId>linkis</artifactId> <version>1.3.0</version> + <relativePath>../../../../pom.xml</relativePath> </parent> <artifactId>linkis-datasource-manager-common</artifactId>
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/src/main/java/org/apache/linkis/datasourcemanager/common/DataSources.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/src/main/java/org/apache/linkis/datasourcemanager/common/DataSources.java new file mode 100644 index 00000000000..d6fb45a01e4 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/src/main/java/org/apache/linkis/datasourcemanager/common/DataSources.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.linkis.datasourcemanager.common; +
+import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.datasourcemanager.common.domain.DataSource; +import org.apache.linkis.datasourcemanager.common.domain.DataSourceType; + +import java.util.HashMap; +import java.util.Map; +
+public class DataSources { + + /** Default HDFS name */ + private static final CommonVars<String> DEFAULT_HDFS_NAME = + CommonVars.apply("wds.linkis.server.dsm.default.hdfs.name", ".LOCAL_HDFS"); + + private static final Map<String, DataSource> DEFAULT_DATASOURCES = new HashMap<>(); +
+ static { + DataSourceType hdfsType = new DataSourceType(); + hdfsType.setName("hdfs"); + DataSource hdfs = new DataSource(); + hdfs.setDataSourceType(hdfsType); + hdfs.setDataSourceName(DEFAULT_HDFS_NAME.getValue()); + DEFAULT_DATASOURCES.put(hdfs.getDataSourceName(), hdfs); + DEFAULT_DATASOURCES + .values() + .forEach(dataSource -> dataSource.setCreateUser(System.getProperty("user.name"))); + } +
+ /** + * Find the default data source by name + * + * @param dataSourceName data source name + * @return data source + */ + public static DataSource getDefault(String dataSourceName) { + return DEFAULT_DATASOURCES.get(dataSourceName); + } +}
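DataSources gives callers a name-keyed registry of built-in defaults such as the reserved .LOCAL_HDFS entry, so a lookup can short-circuit before touching the database. A minimal usage sketch; the enclosing method and the dataSourceDao fallback are hypothetical, not part of this patch:

```java
// Hypothetical sketch: resolve built-in defaults before querying storage.
public DataSource resolveDataSource(String dataSourceName) {
  DataSource builtin = DataSources.getDefault(dataSourceName);
  if (builtin != null) {
    return builtin; // e.g. the ".LOCAL_HDFS" default registered above
  }
  return dataSourceDao.selectByName(dataSourceName); // hypothetical DAO call
}
```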
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/src/main/java/org/apache/linkis/datasourcemanager/common/errorcode/LinkisDatasourceManagerErrorCodeSummary.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/src/main/java/org/apache/linkis/datasourcemanager/common/errorcode/LinkisDatasourceManagerErrorCodeSummary.java new file mode 100644 index 00000000000..b7a8b3d96bc --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/src/main/java/org/apache/linkis/datasourcemanager/common/errorcode/LinkisDatasourceManagerErrorCodeSummary.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.datasourcemanager.common.errorcode; +
+public enum LinkisDatasourceManagerErrorCodeSummary { + SERIALIZATION_FAILED( + 16897, + "Unable to deserialize to object from string(json) in type: (序列化失败)", + "Unable to deserialize to object from string(json) in type: (序列化失败)"), + CANNOT_BE_SERIALIZATION(16898, "cannot be serialized (无法序列化)", "cannot be serialized (无法序列化)"), + CONNECTION_FAILED(99983, "Connection Failed(连接失败)", "Connection Failed(连接失败)"), + REMOTE_SERVICE_ERROR( + 99983, "Remote Service Error(远端服务出错, 联系运维处理)", "Remote Service Error(远端服务出错, 联系运维处理)"), + DATASOURCE_NOT_FOUND(99988, "datasource not found(未找到数据源)", "datasource not found(未找到数据源)"), + PARAM_VALIDATE_FAILED(99986, "Param Validate Failed(参数校验出错)", "Param Validate Failed(参数校验出错)"), + ENVID_ATYPICAL(99986, "envId atypical(环境非典型)", "envId atypical(环境非典型)"), + IS_NULL_MS(99986, "", ""), + EXPRESS_IS_NULL(99987, "", ""), + OPERATE_FILE_IN_REQUEST( + 99987, + "Fail to operate file in request(上传文件处理失败)", + "Fail to operate file in request(上传文件处理失败)"); +
+ /** Error code(错误码) */ + private int errorCode; + /** Error description(错误描述) */ + private String errorDesc; + /** Possible cause of the error(错误可能出现的原因) */ + private String comment; +
+ LinkisDatasourceManagerErrorCodeSummary(int errorCode, String errorDesc, String comment) { + this.errorCode = errorCode; + this.errorDesc = errorDesc; + this.comment = comment; + } +
+ public int getErrorCode() { + return errorCode; + } + + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } +
+ @Override + public String toString() { + return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc; + } +}
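The hunks that follow replace hard-coded -1 codes and inline message strings with these constants, so a stable numeric code and its bilingual description travel together. The pattern, exactly as the diffs below apply it via static imports:

```java
// Both lines appear in the hunks below; shown here only to make the
// before/after intent of the enum explicit.
throw new JsonErrorException(SERIALIZATION_FAILED.getErrorCode(), message, e);
throw new ErrorException(
    DATASOURCE_NOT_FOUND.getErrorCode(), DATASOURCE_NOT_FOUND.getErrorDesc());
```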
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/src/main/java/org/apache/linkis/datasourcemanager/common/util/json/Json.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/src/main/java/org/apache/linkis/datasourcemanager/common/util/json/Json.java index 761d71a660b..a691e2373b7 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/src/main/java/org/apache/linkis/datasourcemanager/common/util/json/Json.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/common/src/main/java/org/apache/linkis/datasourcemanager/common/util/json/Json.java @@ -34,6 +34,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.apache.linkis.datasourcemanager.common.errorcode.LinkisDatasourceManagerErrorCodeSummary.CANNOT_BE_SERIALIZATION; +import static org.apache.linkis.datasourcemanager.common.errorcode.LinkisDatasourceManagerErrorCodeSummary.SERIALIZATION_FAILED; + /** Json utils */ public class Json { private static final String PREFIX = "[";
@@ -82,7 +85,7 @@ public static <T> T fromJson(String json, Class<T> clazz, Class<?>... parameters + (null != clazz ? clazz.getSimpleName() : "UNKNOWN") + "], parameters size: " + parameters.length; - throw new JsonErrorException(-1, message, e); + throw new JsonErrorException(SERIALIZATION_FAILED.getErrorCode(), message, e); } } return null;
@@ -105,7 +108,7 @@ public static <T> T fromJson(InputStream stream, Class<T> clazz, Class<?>... par + (null != clazz ? clazz.getSimpleName() : "UNKNOWN") + "], parameters size: " + parameters.length; - throw new JsonErrorException(-1, message, e); + throw new JsonErrorException(SERIALIZATION_FAILED.getErrorCode(), message, e); } return fromJson(builder.toString(), clazz, parameters); }
@@ -123,7 +126,7 @@ public static String toJson(Object obj, Class<?> model) throws JsonErrorExceptio "Unable to serialize the object in type: [" + (null != model ? model.getSimpleName() : "UNKNOWN") + "]"; - throw new JsonErrorException(-1, message, e); + throw new JsonErrorException(CANNOT_BE_SERIALIZATION.getErrorCode(), message, e); } } return null;
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/pom.xml index 5f957e61f8e..0f3c2a12e00 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/pom.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/pom.xml @@ -21,6 +21,7 @@ <groupId>org.apache.linkis</groupId> <artifactId>linkis</artifactId> <version>1.3.0</version> + <relativePath>../../../../pom.xml</relativePath> </parent> <artifactId>linkis-datasource-manager-server</artifactId>
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/assembly/distribution.xml index 3f23a21c393..e75135259f1 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/assembly/distribution.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/assembly/distribution.xml @@ -16,10 +16,8 @@ ~ limitations under the License. --> - +<assembly> <id>linkis-datasource-manager-server</id> <formats> <format>zip</format>
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/restful/DataSourceCoreRestfulApi.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/restful/DataSourceCoreRestfulApi.java index b0c02792f59..b96a489f37d 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/restful/DataSourceCoreRestfulApi.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/restful/DataSourceCoreRestfulApi.java @@ -60,6 +60,8 @@ import java.io.UnsupportedEncodingException; import java.util.*; +import static org.apache.linkis.datasourcemanager.common.errorcode.LinkisDatasourceManagerErrorCodeSummary.DATASOURCE_NOT_FOUND; + @Api(tags = "data source core restful api") @RestController @RequestMapping(
@@ -258,8 +260,8 @@ public Message insertJsonParameter( dataSourceInfoService.getDataSourceInfoBrief(dataSourceId); if (null == dataSource) { throw new ErrorException( - ServiceErrorCode.DATASOURCE_NOTFOUND_ERROR.getValue(), - "datasource not found "); + DATASOURCE_NOT_FOUND.getErrorCode(), + DATASOURCE_NOT_FOUND.getErrorDesc()); } if (!AuthContext.hasPermission(dataSource, userName)) { return Message.error(
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/restful/DataSourceOperateRestfulApi.java
b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/restful/DataSourceOperateRestfulApi.java index 3ce5fe3288a..30ab359f470 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/restful/DataSourceOperateRestfulApi.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/restful/DataSourceOperateRestfulApi.java @@ -55,6 +55,8 @@ import java.util.Map; import java.util.Set; +import static org.apache.linkis.datasourcemanager.common.errorcode.LinkisDatasourceManagerErrorCodeSummary.ENVID_ATYPICAL; + @Api(tags = "data source operate restful api") @RestController @RequestMapping(
@@ -112,7 +114,7 @@ protected void doConnect(String operator, DataSource dataSource) throws ErrorExc Long.parseLong((String) dataSource.getConnectParams().get("envId")), dataSource); } catch (Exception e) { - throw new ParameterValidateException("envId atypical" + e); + throw new ParameterValidateException(ENVID_ATYPICAL.getErrorDesc() + e); } } List<DataSourceParamKeyDefinition> keyDefinitionList =
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/service/impl/BmlAppServiceImpl.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/service/impl/BmlAppServiceImpl.java index d85e3748b0d..fc0b86cccba 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/service/impl/BmlAppServiceImpl.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/service/impl/BmlAppServiceImpl.java @@ -23,7 +23,6 @@ import org.apache.linkis.bml.protocol.BmlUpdateResponse; import org.apache.linkis.bml.protocol.BmlUploadResponse; import org.apache.linkis.common.exception.ErrorException; -import org.apache.linkis.datasourcemanager.common.ServiceErrorCode; import org.apache.linkis.datasourcemanager.core.service.BmlAppService; import org.springframework.cloud.context.config.annotation.RefreshScope;
@@ -36,6 +35,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.apache.linkis.datasourcemanager.common.errorcode.LinkisDatasourceManagerErrorCodeSummary.EXPRESS_IS_NULL; + /** Wrap the communication between Bml service // TODO RPCException defined */ @Service @RefreshScope
@@ -61,7 +62,7 @@ public String clientUploadResource(String userName, String fileName, InputStream try { BmlUploadResponse response = client.uploadResource(userName, fileName, inputStream); if (!response.isSuccess()) { - throw new ErrorException(ServiceErrorCode.BML_SERVICE_ERROR.getValue(), ""); + throw new ErrorException(EXPRESS_IS_NULL.getErrorCode(), ""); } return response.resourceId(); } catch (Exception e) {
@@ -87,7 +88,7 @@ public void clientRemoveResource(String userName, String resourceId) throws Erro try { BmlDeleteResponse response = client.deleteResource(userName, resourceId); if (!response.isSuccess()) { - throw new ErrorException(ServiceErrorCode.BML_SERVICE_ERROR.getValue(), ""); + throw new ErrorException(EXPRESS_IS_NULL.getErrorCode(), ""); } } catch (Exception e) { LOG.error(
@@ -114,7 +115,7 @@ public String clientUpdateResource(String userName, String
resourceId, InputStre BmlUpdateResponse response = client.updateResource(userName, resourceId, "filename", inputStream); if (!response.isSuccess()) { - throw new ErrorException(ServiceErrorCode.BML_SERVICE_ERROR.getValue(), ""); + throw new ErrorException(EXPRESS_IS_NULL.getErrorCode(), ""); } return response.version(); } catch (Exception e) { diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/service/impl/MetadataOperateServiceImpl.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/service/impl/MetadataOperateServiceImpl.java index 4f9ef4e40ff..3602bea009f 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/service/impl/MetadataOperateServiceImpl.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/service/impl/MetadataOperateServiceImpl.java @@ -37,8 +37,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.linkis.datasourcemanager.common.ServiceErrorCode.BML_SERVICE_ERROR; -import static org.apache.linkis.datasourcemanager.common.ServiceErrorCode.REMOTE_METADATA_SERVICE_ERROR; +import static org.apache.linkis.datasourcemanager.common.errorcode.LinkisDatasourceManagerErrorCodeSummary.*; @Service public class MetadataOperateServiceImpl implements MetadataOperateService { @@ -77,8 +76,8 @@ public void doRemoteConnect( } catch (ErrorException e) { // TODO redefined a exception extends warnException throw new WarnException( - BML_SERVICE_ERROR.getValue(), - "Fail to operate file in request[上传文件处理失败]"); + OPERATE_FILE_IN_REQUEST.getErrorCode(), + OPERATE_FILE_IN_REQUEST.getErrorDesc()); } } } @@ -97,18 +96,18 @@ public void doRemoteConnect( MetadataResponse response = (MetadataResponse) object; if (!response.status()) { throw new WarnException( - REMOTE_METADATA_SERVICE_ERROR.getValue(), - "Connection Failed[连接失败], Msg[" + response.data() + "]"); + CONNECTION_FAILED.getErrorCode(), + CONNECTION_FAILED.getErrorDesc() + ", Msg[" + response.data() + "]"); } } else { throw new WarnException( - REMOTE_METADATA_SERVICE_ERROR.getValue(), "Remote Service Error[远端服务出错, 联系运维处理]"); + REMOTE_SERVICE_ERROR.getErrorCode(), REMOTE_SERVICE_ERROR.getErrorDesc()); } } catch (Exception t) { if (!(t instanceof WarnException)) { throw new WarnException( - REMOTE_METADATA_SERVICE_ERROR.getValue(), - "Remote Service Error[远端服务出错, 联系运维处理], message:[" + t.getMessage() + "]"); + REMOTE_SERVICE_ERROR.getErrorCode(), + REMOTE_SERVICE_ERROR.getErrorDesc() + ", message:[" + t.getMessage() + "]"); } throw t; } diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/DataSourceParameterValidator.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/DataSourceParameterValidator.java index 084624c5fa3..bc9a99819b2 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/DataSourceParameterValidator.java +++ 
b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/DataSourceParameterValidator.java @@ -33,6 +33,8 @@ import java.util.Objects; import java.util.stream.Collectors; +import static org.apache.linkis.datasourcemanager.common.errorcode.LinkisDatasourceManagerErrorCodeSummary.PARAM_VALIDATE_FAILED; + @Component public class DataSourceParameterValidator implements ParameterValidator { @PostConstruct @@ -92,7 +94,8 @@ public void validate( if (null == keyValue || StringUtils.isBlank(String.valueOf(keyValue))) { if (def.isRequire()) { throw new ParameterValidateException( - "Param Validate Failed[参数校验出错], [the value of key: '" + PARAM_VALIDATE_FAILED.getErrorDesc() + + ", [the value of key: '" + keyName + " cannot be blank']"); } diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/ParameterValidateException.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/ParameterValidateException.java index 43ef2b5a1df..d63356749a2 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/ParameterValidateException.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/ParameterValidateException.java @@ -19,14 +19,14 @@ import org.apache.linkis.common.exception.ErrorException; -import static org.apache.linkis.datasourcemanager.common.ServiceErrorCode.PARAM_VALIDATE_ERROR; +import static org.apache.linkis.datasourcemanager.common.errorcode.LinkisDatasourceManagerErrorCodeSummary.IS_NULL_MS; public class ParameterValidateException extends ErrorException { public ParameterValidateException(String desc) { - super(PARAM_VALIDATE_ERROR.getValue(), desc); + super(IS_NULL_MS.getErrorCode(), desc); } public ParameterValidateException(String desc, String ip, int port, String serviceKind) { - super(PARAM_VALIDATE_ERROR.getValue(), desc, ip, port, serviceKind); + super(IS_NULL_MS.getErrorCode(), desc, ip, port, serviceKind); } } diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/strategy/RegExpParameterValidateStrategy.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/strategy/RegExpParameterValidateStrategy.java index cb9beb4684c..34d0ab383d1 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/strategy/RegExpParameterValidateStrategy.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/strategy/RegExpParameterValidateStrategy.java @@ -26,6 +26,8 @@ import java.util.List; import java.util.regex.Pattern; +import static org.apache.linkis.datasourcemanager.common.errorcode.LinkisDatasourceManagerErrorCodeSummary.PARAM_VALIDATE_FAILED; + /** RegExpression validate strategy */ public class RegExpParameterValidateStrategy implements ParameterValidateStrategy { @@ -64,7 +66,8 @@ private void match(String key, String name, String value, String 
valueRegex) boolean match = Pattern.matches(valueRegex, value); if (!match) { throw new ParameterValidateException( - "Param Validate Failed[参数校验出错], [the value: '" + PARAM_VALIDATE_FAILED.getErrorDesc() + + ", [the value: '" + String.valueOf(value) + "' to key: '" + key diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/strategy/TypeParameterValidateStrategy.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/strategy/TypeParameterValidateStrategy.java index 8a40e5407f5..f44ae5a1142 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/strategy/TypeParameterValidateStrategy.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/java/org/apache/linkis/datasourcemanager/core/validate/strategy/TypeParameterValidateStrategy.java @@ -26,6 +26,7 @@ import java.util.List; import java.util.Map; +import static org.apache.linkis.datasourcemanager.common.errorcode.LinkisDatasourceManagerErrorCodeSummary.PARAM_VALIDATE_FAILED; import static org.apache.linkis.datasourcemanager.core.formdata.CustomMultiPartFormDataTransformer.PrimitiveUtils; /** Type validate strategy */ @@ -44,7 +45,8 @@ public Object validate(DataSourceParamKeyDefinition keyDefinition, Object actual if (valueType == DataSourceParamKeyDefinition.ValueType.FILE) { if (!actualValue.getClass().equals(FormStreamContent.class)) { throw new ParameterValidateException( - "Param Validate Failed[参数校验出错], [the value of '" + PARAM_VALIDATE_FAILED.getErrorDesc() + + ", [the value of '" + keyDefinition.getKey() + "' must be 'File']"); } @@ -61,14 +63,16 @@ public Object validate(DataSourceParamKeyDefinition keyDefinition, Object actual } } catch (Exception e) { throw new ParameterValidateException( - "Param Validate Failed[参数校验出错], [type of value: '" + PARAM_VALIDATE_FAILED.getErrorDesc() + + ", [type of value: '" + actualValue + "' is not '" + javaType.getSimpleName() + "']"); } throw new ParameterValidateException( - "Param Validate Failed[参数校验出错], [type of value: '" + PARAM_VALIDATE_FAILED.getErrorDesc() + + ", [type of value: '" + actualValue + "' is not '" + javaType.getSimpleName() diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/test/java/org/apache/linkis/datasourcemanager/core/dao/DataSourceVersionDaoTest.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/test/java/org/apache/linkis/datasourcemanager/core/dao/DataSourceVersionDaoTest.java index 9006fdab2eb..1a9ea139ca2 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/test/java/org/apache/linkis/datasourcemanager/core/dao/DataSourceVersionDaoTest.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/test/java/org/apache/linkis/datasourcemanager/core/dao/DataSourceVersionDaoTest.java @@ -32,6 +32,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; +@Disabled class DataSourceVersionDaoTest extends BaseDaoTest { private static final Logger logger = LoggerFactory.getLogger(DataSourceVersionDaoTest.class); diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/pom.xml 
b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/pom.xml index 84a291554e8..a2d26d528ca 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/pom.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../../../pom.xml linkis-metadata-query-common @@ -54,6 +55,11 @@ linkis-datasource-manager-common ${project.version} + + + jakarta.annotation + jakarta.annotation-api + diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/AbstractMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/AbstractCacheMetaService.java similarity index 62% rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/AbstractMetaService.java rename to linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/AbstractCacheMetaService.java index bde9197718a..5af1a3fbece 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/AbstractMetaService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/AbstractCacheMetaService.java @@ -17,13 +17,10 @@ package org.apache.linkis.metadata.query.common.service; -import org.apache.linkis.common.exception.WarnException; import org.apache.linkis.datasourcemanager.common.util.json.Json; import org.apache.linkis.metadata.query.common.cache.CacheConfiguration; import org.apache.linkis.metadata.query.common.cache.CacheManager; import org.apache.linkis.metadata.query.common.cache.ConnCacheManager; -import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; -import org.apache.linkis.metadata.query.common.domain.MetaPartitionInfo; import org.apache.linkis.metadata.query.common.exception.MetaRuntimeException; import org.apache.commons.lang3.StringUtils; @@ -34,7 +31,7 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.security.MessageDigest; -import java.util.List; +import java.util.Collections; import java.util.Map; import java.util.Objects; import java.util.concurrent.Callable; @@ -45,8 +42,15 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public abstract class AbstractMetaService implements MetadataService { - private static final Logger LOG = LoggerFactory.getLogger(AbstractMetaService.class); +/** + * Meta service use cache manager + * + * @param + */ +public abstract class AbstractCacheMetaService implements BaseMetadataService { + + private static final Logger LOG = LoggerFactory.getLogger(AbstractCacheMetaService.class); + private static final String CONN_CACHE_REQ = "_STORED"; private CacheManager connCacheManager; @@ -62,140 +66,40 @@ public void init() { /** * If want to use cache component, you should invoke this in constructor method * - * @param cacheManager + * @param cacheManager cache manage */ protected void initCache(CacheManager cacheManager) { - String prefix = this.getClass().getSimpleName(); - reqCache = - cacheManager.buildCache( - prefix + CONN_CACHE_REQ, - notification -> { - assert notification.getValue() != null; - 
close(notification.getValue().getConnection()); - }); - // Clean up the req cache - reqCache.cleanUp(); - } - - @Override - public abstract MetadataConnection getConnection(String operator, Map params) - throws Exception; - - @Override - public List getDatabases(String operator, Map params) { - return this.getConnAndRun(operator, params, this::queryDatabases); - } - - @Override - public List getTables(String operator, Map params, String database) { - return this.getConnAndRun(operator, params, conn -> this.queryTables(conn, database)); - } - - @Override - public Map getTableProps( - String operator, Map params, String database, String table) { - return this.getConnAndRun( - operator, params, conn -> this.queryTableProps(conn, database, table)); - } - - @Override - public MetaPartitionInfo getPartitions( - String operator, - Map params, - String database, - String table, - boolean traverse) { - return this.getConnAndRun( - operator, params, conn -> this.queryPartitions(conn, database, table, traverse)); - } - - @Override - public List getColumns( - String operator, Map params, String database, String table) { - return this.getConnAndRun(operator, params, conn -> this.queryColumns(conn, database, table)); - } - - @Override - public Map getPartitionProps( - String operator, - Map params, - String database, - String table, - String partition) { - return this.getConnAndRun( - operator, params, conn -> this.queryPartitionProps(conn, database, table, partition)); - } - - /** - * Get database list by connection - * - * @param connection metadata connection - * @return - */ - public List queryDatabases(C connection) { - throw new WarnException(-1, "This method is no supported"); - } - - /** - * Get table list by connection and database - * - * @param connection metadata connection - * @param database database - * @return - */ - public List queryTables(C connection, String database) { - throw new WarnException(-1, "This method is no supported"); - } - - /** - * Get partitions by connection, database and table - * - * @param connection metadata connection - * @param database database - * @param table table - * @return - */ - public MetaPartitionInfo queryPartitions( - C connection, String database, String table, boolean traverse) { - throw new WarnException(-1, "This method is no supported"); + if (useCache()) { + String prefix = this.getClass().getSimpleName(); + reqCache = + cacheManager.buildCache( + prefix + CONN_CACHE_REQ, + notification -> { + assert notification.getValue() != null; + close(notification.getValue().getConnection()); + }); + // Clean up the req cache + reqCache.cleanUp(); + } } /** - * Get columns by connection, database and table + * If use the cache * - * @param connection metadata connection - * @param database database - * @param table table - * @return + * @return boolean */ - public List queryColumns(C connection, String database, String table) { - throw new WarnException(-1, "This method is no supported"); + protected boolean useCache() { + return true; } - /** - * Get the properties of partition - * - * @param connection - * @param database - * @param table - * @param partition - * @return - */ - public Map queryPartitionProps( - C connection, String database, String table, String partition) { - throw new WarnException(-1, "This method is no supported"); - } + @Override + public abstract MetadataConnection getConnection(String operator, Map params) + throws Exception; - /** - * Get table properties - * - * @param connection metadata connection - * @param database database 
- * @param table table - * @return - */ - public Map queryTableProps(C connection, String database, String table) { - throw new WarnException(-1, "This method is no supported"); + public Map getConnectionInfo( + String operator, Map params, Map queryParams) { + return this.getConnAndRun( + operator, params, connection -> this.queryConnectionInfo(connection, queryParams)); } public void close(C connection) { @@ -207,12 +111,25 @@ public void close(C connection) { } } + /** + * Get connection information + * + * @param connection connection + * @param queryParams query params + * @return map + */ + public Map queryConnectionInfo(C connection, Map queryParams) { + return Collections.emptyMap(); + } + protected R getConnAndRun( String operator, Map params, Function action) { String cacheKey = ""; MetadataConnection connection = null; try { cacheKey = md5String(Json.toJson(params, null), "", 2); + // Dive the cache by operator/creator + cacheKey = operator + "_" + md5String(Json.toJson(params, null), "", 2); if (null != reqCache) { ConnectionCache connectionCache = getConnectionInCache(reqCache, cacheKey, () -> getConnection(operator, params)); diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/AbstractDbMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/AbstractDbMetaService.java new file mode 100644 index 00000000000..3e113909d1e --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/AbstractDbMetaService.java @@ -0,0 +1,152 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.metadata.query.common.service;
+
+import org.apache.linkis.common.exception.WarnException;
+import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo;
+import org.apache.linkis.metadata.query.common.domain.MetaPartitionInfo;
+
+import java.io.Closeable;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Database meta service
+ *
+ * @param
+ */
+public abstract class AbstractDbMetaService extends AbstractCacheMetaService
+    implements MetadataDbService {
+
+  @Override
+  public List getDatabases(String operator, Map params) {
+    return this.getConnAndRun(operator, params, this::queryDatabases);
+  }
+
+  @Override
+  public List getTables(String operator, Map params, String database) {
+    return this.getConnAndRun(operator, params, conn -> this.queryTables(conn, database));
+  }
+
+  @Override
+  public Map getTableProps(
+      String operator, Map params, String database, String table) {
+    return this.getConnAndRun(
+        operator, params, conn -> this.queryTableProps(conn, database, table));
+  }
+
+  @Override
+  public MetaPartitionInfo getPartitions(
+      String operator,
+      Map params,
+      String database,
+      String table,
+      boolean traverse) {
+    return this.getConnAndRun(
+        operator, params, conn -> this.queryPartitions(conn, database, table, traverse));
+  }
+
+  @Override
+  public List getColumns(
+      String operator, Map params, String database, String table) {
+    return this.getConnAndRun(operator, params, conn -> this.queryColumns(conn, database, table));
+  }
+
+  @Override
+  public Map getPartitionProps(
+      String operator,
+      Map params,
+      String database,
+      String table,
+      String partition) {
+    return this.getConnAndRun(
+        operator, params, conn -> this.queryPartitionProps(conn, database, table, partition));
+  }
+
+  /**
+   * Get database list by connection
+   *
+   * @param connection metadata connection
+   * @return
+   */
+  public List queryDatabases(C connection) {
+    throw new WarnException(-1, "This method is not supported");
+  }
+
+  /**
+   * Get table list by connection and database
+   *
+   * @param connection metadata connection
+   * @param database database
+   * @return
+   */
+  public List queryTables(C connection, String database) {
+    throw new WarnException(-1, "This method is not supported");
+  }
+
+  /**
+   * Get partitions by connection, database and table
+   *
+   * @param connection metadata connection
+   * @param database database
+   * @param table table
+   * @return
+   */
+  public MetaPartitionInfo queryPartitions(
+      C connection, String database, String table, boolean traverse) {
+    throw new WarnException(-1, "This method is not supported");
+  }
+
+  /**
+   * Get columns by connection, database and table
+   *
+   * @param connection metadata connection
+   * @param database database
+   * @param table table
+   * @return
+   */
+  public List queryColumns(C connection, String database, String table) {
+    throw new WarnException(-1, "This method is not supported");
+  }
+
+  /**
+   * Get the properties of partition
+   *
+   * @param connection
+   * @param database
+   * @param table
+   * @param partition
+   * @return
+   */
+  public Map queryPartitionProps(
+      C connection, String database, String table, String partition) {
+    throw new WarnException(-1, "This method is not supported");
+  }
+
+  /**
+   * Get table properties
+   *
+   * @param connection metadata connection
+   * @param database database
+   * @param table table
+   * @return
+   */
+  public Map queryTableProps(C connection, String database, String table) {
+    throw new WarnException(-1, "This method is not supported");
+  }
+}
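Note: the split above leaves AbstractCacheMetaService as the cache-aware base and moves the per-database operations into AbstractDbMetaService. A minimal sketch of a concrete service on top of the new base, for illustration only: the generic parameters are written out as assumed from the upstream source (the diff text has lost its angle brackets, with the bound taken to be C extends Closeable so the request cache can close evicted connections), and DemoDbMetaService, DemoConnection, and the single-argument MetadataConnection constructor are assumptions, not part of this patch.

    package org.apache.linkis.metadata.query.service;

    import org.apache.linkis.metadata.query.common.service.AbstractDbMetaService;
    import org.apache.linkis.metadata.query.common.service.MetadataConnection;

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;
    import java.util.Map;

    public class DemoDbMetaService extends AbstractDbMetaService<DemoDbMetaService.DemoConnection> {

      /** The connection type must be Closeable so the request cache can close evicted entries. */
      public static class DemoConnection implements Closeable {
        @Override
        public void close() throws IOException {
          // release the underlying client here
        }
      }

      @Override
      public MetadataConnection<DemoConnection> getConnection(
          String operator, Map<String, Object> params) throws Exception {
        // Build a connection from the published data source parameters.
        return new MetadataConnection<>(new DemoConnection());
      }

      // Override only what the source supports; the other query* methods keep the
      // default behaviour of throwing WarnException("This method is not supported").
      @Override
      public List<String> queryDatabases(DemoConnection connection) {
        return Arrays.asList("default");
      }
    }

diff --git 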
a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/AbstractFsMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/AbstractFsMetaService.java new file mode 100644 index 00000000000..610896bb087 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/AbstractFsMetaService.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.metadata.query.common.service; + +import java.io.Closeable; +import java.util.Map; + +/** + * Filesystem meta service + * + * @param + */ +public abstract class AbstractFsMetaService extends AbstractCacheMetaService + implements MetadataFsService { + @Override + public String getSchema(String operator, Map params) { + return this.getConnAndRun(operator, params, this::getSchema); + } + + public String getSchema(C connection) { + return ""; + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/BaseMetadataService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/BaseMetadataService.java index e2cf804b35f..e090d6cac1b 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/BaseMetadataService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/BaseMetadataService.java @@ -18,6 +18,7 @@ package org.apache.linkis.metadata.query.common.service; import java.io.Closeable; +import java.util.Collections; import java.util.Map; public interface BaseMetadataService { @@ -30,4 +31,17 @@ public interface BaseMetadataService { */ MetadataConnection getConnection(String operator, Map params) throws Exception; + + /** + * Get connection information (default empty) + * + * @param operator operator + * @param params connect params + * @param queryParams query params + * @return information + */ + default Map getConnectionInfo( + String operator, Map params, Map queryParams) { + return Collections.emptyMap(); + } } diff --git a/linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/exception/OpenLooKengSourceGroupException.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/MetadataFsService.java similarity 
index 70%
rename from linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/exception/OpenLooKengSourceGroupException.java
rename to linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/MetadataFsService.java
index 236b527931b..1c3bcb624df 100644
--- a/linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/exception/OpenLooKengSourceGroupException.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/MetadataFsService.java
@@ -14,14 +14,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.linkis.metadata.query.common.service;
-package org.apache.linkis.engineplugin.openlookeng.exception;
+import java.util.Map;
-import org.apache.linkis.common.exception.ErrorException;
+/** Metadata Fs service */
+public interface MetadataFsService extends BaseMetadataService {
-public class OpenLooKengSourceGroupException extends ErrorException {
-
-  public OpenLooKengSourceGroupException(int errCode, String desc) {
-    super(errCode, desc);
+  /**
+   * Get the File System schema
+   *
+   * @param operator operator
+   * @param params connect params
+   * @return schema name
+   */
+  default String getSchema(String operator, Map params) {
+    return "file";
+  }
 }
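For the filesystem flavour introduced here, a matching sketch under the same assumptions as the DB example above: a hypothetical service extending AbstractFsMetaService only supplies a connection factory and a schema (names are illustrative; judging by the assembly changes later in this patch, the shipped hdfs service would report "hdfs" here).

    package org.apache.linkis.metadata.query.service;

    import org.apache.linkis.metadata.query.common.service.AbstractFsMetaService;
    import org.apache.linkis.metadata.query.common.service.MetadataConnection;

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.Map;

    public class DemoFsMetaService extends AbstractFsMetaService<DemoFsMetaService.FsHandle> {

      public static class FsHandle implements Closeable {
        @Override
        public void close() throws IOException {
          // release the file system handle here
        }
      }

      @Override
      public MetadataConnection<FsHandle> getConnection(
          String operator, Map<String, Object> params) throws Exception {
        return new MetadataConnection<>(new FsHandle());
      }

      /** Surfaced through MetadataFsService#getSchema instead of the "file" default. */
      @Override
      public String getSchema(FsHandle connection) {
        return "hdfs";
      }
    }

diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/MdmConfigurationTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/MdmConfigurationTest.java
new file mode 100644
index 00000000000..b37ea144b88
--- /dev/null
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/MdmConfigurationTest.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.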
+ */ + +package org.apache.linkis.metadata.query.common; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class MdmConfigurationTest { + + @Test + @DisplayName("constTest") + public void constTest() { + + String metaDataServiceApplication = MdmConfiguration.METADATA_SERVICE_APPLICATION.getValue(); + String dataSourceServiceApplication = + MdmConfiguration.DATA_SOURCE_SERVICE_APPLICATION.getValue(); + + Assertions.assertNotNull(metaDataServiceApplication); + Assertions.assertNotNull(dataSourceServiceApplication); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/cache/CacheConfigurationTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/cache/CacheConfigurationTest.java new file mode 100644 index 00000000000..0afa7eb1657 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/cache/CacheConfigurationTest.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.metadata.query.common.cache; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class CacheConfigurationTest { + + @Test + @DisplayName("constTest") + public void constTest() { + + Long cacheExpireTimeValue = CacheConfiguration.CACHE_EXPIRE_TIME.getValue(); + Integer cacheInPoolSizeValue = CacheConfiguration.CACHE_IN_POOL_SIZE.getValue(); + Long cacheMaxSizeValue = CacheConfiguration.CACHE_MAX_SIZE.getValue(); + String mysqlRelationshipListValue = CacheConfiguration.MYSQL_RELATIONSHIP_LIST.getValue(); + + Assertions.assertTrue(cacheExpireTimeValue.longValue() == 600L); + Assertions.assertTrue(cacheInPoolSizeValue == 5); + Assertions.assertTrue(cacheMaxSizeValue == 1000L); + Assertions.assertNotNull(mysqlRelationshipListValue); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/cache/ConnCacheManagerTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/cache/ConnCacheManagerTest.java new file mode 100644 index 00000000000..b7efa884646 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/cache/ConnCacheManagerTest.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.metadata.query.common.cache;
+
+import com.google.common.cache.Cache;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+public class ConnCacheManagerTest {
+
+  @Test
+  @DisplayName("customTest")
+  public void customTest() {
+
+    CacheManager cacheManager = ConnCacheManager.custom();
+    Assertions.assertNotNull(cacheManager);
+  }
+
+  @Test
+  @DisplayName("buildCacheTest")
+  public void buildCacheTest() {
+
+    CacheManager cacheManager = ConnCacheManager.custom();
+    Cache cache =
+        cacheManager.buildCache(
+            "key",
+            notification -> {
+              assert notification.getValue() != null;
+            });
+    Assertions.assertNotNull(cache);
+  }
+}
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/exception/MetaMethodInvokeExceptionTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/exception/MetaMethodInvokeExceptionTest.java
new file mode 100644
index 00000000000..6af95713765
--- /dev/null
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/exception/MetaMethodInvokeExceptionTest.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.metadata.query.common.exception;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+public class MetaMethodInvokeExceptionTest {
+
+  @Test
+  @DisplayName("metaMethodInvokeExceptionTest")
+  public void metaMethodInvokeExceptionTest() {
+    String errorMsg = "Load meta service for mysql fail load [mysql] metadata service failed";
+    MetaMethodInvokeException exception =
+        new MetaMethodInvokeException(500, errorMsg, new Exception());
+    Assertions.assertEquals(errorMsg, exception.getDesc());
+  }
+}
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/exception/MetaRuntimeExceptionTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/exception/MetaRuntimeExceptionTest.java
new file mode 100644
index 00000000000..20b647e1d03
--- /dev/null
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/test/java/org/apache/linkis/metadata/query/common/exception/MetaRuntimeExceptionTest.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.metadata.query.common.exception; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class MetaRuntimeExceptionTest { + + @Test + @DisplayName("metaRuntimeExceptionTest") + public void metaRuntimeExceptionTest() { + + String errorMsg = "Cannot find the keytab file in connect parameters"; + MetaRuntimeException exception = new MetaRuntimeException(errorMsg, new Exception()); + Assertions.assertEquals(errorMsg, exception.getDesc()); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/pom.xml index fae88b3848c..04a60ffa100 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/pom.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../../../pom.xml linkis-metadata-query-server diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/assembly/distribution.xml index 9611fd8cad0..7c94aa85845 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/assembly/distribution.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/assembly/distribution.xml @@ -16,10 +16,8 @@ ~ limitations under the License. 
--> - + linkis-metadata-query-server zip @@ -97,6 +95,17 @@ *-javadoc.jar + + ${basedir}/../service/hdfs/target/out/lib + 0755 + lib/service/hdfs + + *.jar + + + *-javadoc.jar + + diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/loader/MetaClassLoaderManager.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/loader/MetaClassLoaderManager.java index 6a8bc2042f8..09776998013 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/loader/MetaClassLoaderManager.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/loader/MetaClassLoaderManager.java @@ -22,8 +22,8 @@ import org.apache.linkis.common.exception.ErrorException; import org.apache.linkis.metadata.query.common.cache.CacheConfiguration; import org.apache.linkis.metadata.query.common.exception.MetaRuntimeException; -import org.apache.linkis.metadata.query.common.service.AbstractMetaService; -import org.apache.linkis.metadata.query.common.service.MetadataService; +import org.apache.linkis.metadata.query.common.service.AbstractCacheMetaService; +import org.apache.linkis.metadata.query.common.service.BaseMetadataService; import org.apache.linkis.metadata.query.server.utils.MetadataUtils; import org.apache.commons.lang3.StringUtils; @@ -54,7 +54,7 @@ public class MetaClassLoaderManager { CommonVars.apply( "wds.linkis.server.mdm.service.lib.dir", Configuration.getLinkisHome() - + "/lib/linkis-public-enhancements/linkis-ps-metadataquery/service"); + + "/lib/linkis-public-enhancements/linkis-ps-publicservice/metadataquery-service"); public static CommonVars INSTANCE_EXPIRE_TIME = CommonVars.apply("wds.linkis.server.mdm.service.instance.expire-in-seconds", 60); @@ -127,17 +127,17 @@ public BiFunction getInvoker(String dsType) throws Err String prefix = dsType.substring(0, 1).toUpperCase() + dsType.substring(1); expectClassName = String.format(META_CLASS_NAME, prefix); } - Class metaServiceClass = + Class metaServiceClass = searchForLoadMetaServiceClass(metaClassLoader, expectClassName, true); if (Objects.isNull(metaServiceClass)) { throw new MetaRuntimeException( "Fail to init and load meta service class for type: [" + dsType + "]", null); } - MetadataService metadataService = + BaseMetadataService metadataService = MetadataUtils.loadMetaService(metaServiceClass, metaClassLoader); - if (metadataService instanceof AbstractMetaService) { + if (metadataService instanceof AbstractCacheMetaService) { LOG.info("Invoke the init() method in meta service for type: [" + dsType + "]"); - ((AbstractMetaService) metadataService).init(); + ((AbstractCacheMetaService) metadataService).init(); } return new MetaServiceInstance(metadataService, metaClassLoader); }); @@ -148,12 +148,65 @@ public BiFunction getInvoker(String dsType) throws Err ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader(); try { Thread.currentThread().setContextClassLoader(finalServiceInstance.metaClassLoader); - Method method = + List methodsMatched = Arrays.stream(childMethods) - .filter(eachMethod -> eachMethod.getName().equals(m)) - .collect(Collectors.toList()) - .get(0); - return method.invoke(finalServiceInstance.serviceInstance, args); + .filter( + eachMethod -> { + if (eachMethod.getName().equals(m)) { + Class[] 
parameterType = eachMethod.getParameterTypes(); + if (parameterType.length == args.length) { + for (int i = 0; i < parameterType.length; i++) { + if (Objects.nonNull(args[i])) { + boolean matches = + parameterType[i].isAssignableFrom(args[i].getClass()) + || ((args[i].getClass().isPrimitive() + || parameterType[i].isPrimitive()) + && MetadataUtils.getPrimitive(args[i].getClass()) + == MetadataUtils.getPrimitive(parameterType[i])); + if (!matches) { + return false; + } + } + } + return true; + } + } + return false; + }) + .collect(Collectors.toList()); + if (methodsMatched.isEmpty()) { + String type = null; + if (Objects.nonNull(args)) { + type = + Arrays.stream(args) + .map(arg -> Objects.nonNull(arg) ? arg.getClass().toString() : "null") + .collect(Collectors.joining(",")); + } + String message = + "Unknown method: [ name: " + + m + + ", type: [" + + type + + "]] for meta service instance: [" + + finalServiceInstance.getServiceInstance().toString() + + "]"; + LOG.warn(message); + throw new MetaRuntimeException(message, null); + } else if (methodsMatched.size() > 1) { + LOG.warn( + "Find multiple matched methods with name: [" + + m + + "] such as: \n" + + methodsMatched.stream() + .map( + method -> + method.getName() + ":" + Arrays.toString(method.getParameterTypes())) + .collect(Collectors.joining("\n")) + + "\n in meta service instance: [" + + finalServiceInstance.getServiceInstance().toString() + + "], will choose the first one"); + } + return methodsMatched.get(0).invoke(finalServiceInstance.serviceInstance, args); } catch (Exception e) { Throwable t = e; // UnWrap the Invocation target exception @@ -174,12 +227,12 @@ public BiFunction getInvoker(String dsType) throws Err }; } - private Class searchForLoadMetaServiceClass( + private Class searchForLoadMetaServiceClass( ClassLoader classLoader, String expectClassName, boolean initialize) { ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(classLoader); try { - Class metaClass = null; + Class metaClass = null; if (StringUtils.isNotBlank(expectClassName)) { metaClass = MetadataUtils.loadMetaServiceClass( @@ -228,7 +281,7 @@ private List getJarsUrlsOfPath(String path) throws MalformedURLException { /** ServiceInstance Holder */ public static class MetaServiceInstance { - private MetadataService serviceInstance; + private BaseMetadataService serviceInstance; private Method[] methods; @@ -236,14 +289,14 @@ public static class MetaServiceInstance { private long initTimeStamp = 0L; - public MetaServiceInstance(MetadataService serviceInstance, ClassLoader metaClassLoader) { + public MetaServiceInstance(BaseMetadataService serviceInstance, ClassLoader metaClassLoader) { this.serviceInstance = serviceInstance; this.metaClassLoader = metaClassLoader; this.methods = serviceInstance.getClass().getMethods(); this.initTimeStamp = System.currentTimeMillis(); } - public MetadataService getServiceInstance() { + public BaseMetadataService getServiceInstance() { return serviceInstance; } } diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/restful/MetadataCoreRestful.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/restful/MetadataCoreRestful.java index d8ee38daace..c454918bc73 100644 --- 
a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/restful/MetadataCoreRestful.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/restful/MetadataCoreRestful.java @@ -23,6 +23,7 @@ import org.apache.linkis.metadata.query.common.domain.MetaPartitionInfo; import org.apache.linkis.metadata.query.common.exception.MetaMethodInvokeException; import org.apache.linkis.metadata.query.server.service.MetadataQueryService; +import org.apache.linkis.metadata.query.server.utils.MetadataUtils; import org.apache.linkis.server.Message; import org.apache.linkis.server.security.SecurityFilter; @@ -38,6 +39,7 @@ import java.util.List; import java.util.Map; +import java.util.regex.Matcher; @RestController @RequestMapping(value = "/metadatamanager") @@ -57,6 +59,10 @@ public Message getDatabases( if (StringUtils.isBlank(system)) { return Message.error("'system' is missing[缺少系统名]"); } + if (!MetadataUtils.nameRegexPattern.matcher(dataSourceId).matches()) { + return Message.error("'dataSourceId' is invalid[数据源错误]"); + } + List databases = metadataAppService.getDatabasesByDsId( dataSourceId, system, SecurityFilter.getLoginUsername(request)); @@ -82,6 +88,12 @@ public Message getTables( if (StringUtils.isBlank(system)) { return Message.error("'system' is missing[缺少系统名]"); } + if (!MetadataUtils.nameRegexPattern.matcher(database).matches()) { + return Message.error("'database' is invalid[数据库名称错误]"); + } + if (!MetadataUtils.nameRegexPattern.matcher(dataSourceId).matches()) { + return Message.error("'dataSourceId' is invalid[数据源错误]"); + } List tables = metadataAppService.getTablesByDsId( dataSourceId, @@ -116,6 +128,16 @@ public Message getTableProps( if (StringUtils.isBlank(system)) { return Message.error("'system' is missing[缺少系统名]"); } + if (!MetadataUtils.nameRegexPattern.matcher(database).matches()) { + return Message.error("'database' is invalid[数据库名错误]"); + } + if (!MetadataUtils.nameRegexPattern.matcher(table).matches()) { + return Message.error("'table' is invalid[表名错误]"); + } + if (!MetadataUtils.nameRegexPattern.matcher(dataSourceId).matches()) { + return Message.error("'dataSourceId' is invalid[数据源错误]"); + } + Map tableProps = metadataAppService.getTablePropsByDsId( dataSourceId, @@ -156,6 +178,15 @@ public Message getPartitions( if (StringUtils.isBlank(system)) { return Message.error("'system' is missing[缺少系统名]"); } + if (!MetadataUtils.nameRegexPattern.matcher(database).matches()) { + return Message.error("'database' is invalid[数据库名错误]"); + } + if (!MetadataUtils.nameRegexPattern.matcher(table).matches()) { + return Message.error("'table' is invalid[表名错误]"); + } + if (!MetadataUtils.nameRegexPattern.matcher(dataSourceId).matches()) { + return Message.error("'dataSourceId' is invalid[数据源错误]"); + } MetaPartitionInfo partitionInfo = metadataAppService.getPartitionsByDsId( dataSourceId, @@ -195,6 +226,18 @@ public Message getPartitionProps( if (StringUtils.isBlank(system)) { return Message.error("'system' is missing[缺少系统名]"); } + if (!MetadataUtils.nameRegexPattern.matcher(database).matches()) { + return Message.error("'database' is invalid[数据库名错误]"); + } + if (!MetadataUtils.nameRegexPattern.matcher(table).matches()) { + return Message.error("'table' is invalid[表名错误]"); + } + if (!MetadataUtils.nameRegexPattern.matcher(dataSourceId).matches()) { + return Message.error("'dataSourceId' is invalid[数据源错误]"); + } + if 
(!MetadataUtils.nameRegexPattern.matcher(partition).matches()) {
+      return Message.error("'partition' is invalid[partition错误]");
+    }
     Map partitionProps =
         metadataAppService.getPartitionPropsByDsId(
             dataSourceId,
@@ -235,6 +278,15 @@ public Message getColumns(
     if (StringUtils.isBlank(system)) {
       return Message.error("'system' is missing[缺少系统名]");
     }
+    if (!MetadataUtils.nameRegexPattern.matcher(database).matches()) {
+      return Message.error("'database' is invalid[数据库名错误]");
+    }
+    if (!MetadataUtils.nameRegexPattern.matcher(table).matches()) {
+      return Message.error("'table' is invalid[表名错误]");
+    }
+    if (!MetadataUtils.nameRegexPattern.matcher(dataSourceId).matches()) {
+      return Message.error("'dataSourceId' is invalid[数据源错误]");
+    }
     List columns =
         metadataAppService.getColumnsByDsId(
             dataSourceId,
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/restful/MetadataQueryRestful.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/restful/MetadataQueryRestful.java
index fdf1d7c6adf..0699bf346de 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/restful/MetadataQueryRestful.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/restful/MetadataQueryRestful.java
@@ -23,6 +23,7 @@
 import org.apache.linkis.metadata.query.common.domain.MetaPartitionInfo;
 import org.apache.linkis.metadata.query.common.exception.MetaMethodInvokeException;
 import org.apache.linkis.metadata.query.server.service.MetadataQueryService;
+import org.apache.linkis.metadata.query.server.utils.MetadataUtils;
 import org.apache.linkis.server.Message;
 import org.apache.linkis.server.security.SecurityFilter;
@@ -42,6 +43,7 @@
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 @Api(tags = "metadata query")
 @RestController
@@ -52,6 +54,32 @@ public class MetadataQueryRestful {
   @Autowired private MetadataQueryService metadataQueryService;
+  @RequestMapping(value = "/getConnectionInfo", method = RequestMethod.GET)
+  public Message getConnectionInfo(
+      @RequestParam("dataSourceName") String dataSourceName,
+      @RequestParam("system") String system,
+      HttpServletRequest request) {
+    try {
+      if (StringUtils.isBlank(system)) {
+        return Message.error("'system' is missing[缺少系统名]");
+      }
+      Map queryParams =
+          request.getParameterMap().entrySet().stream()
+              .collect(
+                  Collectors.toMap(
+                      Map.Entry::getKey, entry -> StringUtils.join(entry.getValue(), ",")));
+      Map info =
+          metadataQueryService.getConnectionInfoByDsName(
+              dataSourceName, queryParams, system, SecurityFilter.getLoginUsername(request));
+      return Message.ok().data("info", info);
+    } catch (Exception e) {
+      return errorToResponseMessage(
+          "Fail to get connection info [获得连接信息失败], name: ["
+              + dataSourceName
+              + "], system:["
+              + system
+              + "]",
+          e);
+    }
+  }
+
 @ApiOperation(value = "getDatabases", notes = "get databases", response = Message.class)
 @ApiImplicitParams({
   @ApiImplicitParam(name = "dataSourceName", required = true, dataType = "String", value = "data source name"),
@@ -66,7 +94,9 @@ public Message getDatabases(
     if (StringUtils.isBlank(system)) {
       return Message.error("'system' is missing[缺少系统名]");
     }
-
+    if (!MetadataUtils.nameRegexPattern.matcher(dataSourceName).matches()) {
+      return Message.error("'dataSourceName' is invalid[数据源错误]");
+    }
     List databases =
         metadataQueryService.getDatabasesByDsName(
             dataSourceName, system, SecurityFilter.getLoginUsername(request));
@@ -98,6 +128,12 @@ public Message getTables(
     if (StringUtils.isBlank(system)) {
       return Message.error("'system' is missing[缺少系统名]");
     }
+    if (!MetadataUtils.nameRegexPattern.matcher(dataSourceName).matches()) {
+      return Message.error("'dataSourceName' is invalid[数据源错误]");
+    }
+    if (!MetadataUtils.nameRegexPattern.matcher(database).matches()) {
+      return Message.error("'database' is invalid[数据库名称错误]");
+    }
     List tables =
         metadataQueryService.getTablesByDsName(
             dataSourceName,
@@ -137,6 +173,15 @@ public Message getTableProps(
     if (StringUtils.isBlank(system)) {
       return Message.error("'system' is missing[缺少系统名]");
     }
+    if (!MetadataUtils.nameRegexPattern.matcher(database).matches()) {
+      return Message.error("'database' is invalid[数据库名错误]");
+    }
+    if (!MetadataUtils.nameRegexPattern.matcher(table).matches()) {
+      return Message.error("'table' is invalid[表名错误]");
+    }
+    if (!MetadataUtils.nameRegexPattern.matcher(dataSourceName).matches()) {
+      return Message.error("'dataSourceName' is invalid[数据源错误]");
+    }
     Map tableProps =
         metadataQueryService.getTablePropsByDsName(
             dataSourceName,
@@ -181,6 +226,15 @@ public Message getPartitions(
     if (StringUtils.isBlank(system)) {
       return Message.error("'system' is missing[缺少系统名]");
     }
+    if (!MetadataUtils.nameRegexPattern.matcher(database).matches()) {
+      return Message.error("'database' is invalid[数据库名错误]");
+    }
+    if (!MetadataUtils.nameRegexPattern.matcher(table).matches()) {
+      return Message.error("'table' is invalid[表名错误]");
+    }
+    if (!MetadataUtils.nameRegexPattern.matcher(dataSourceName).matches()) {
+      return Message.error("'dataSourceName' is invalid[数据源错误]");
+    }
     MetaPartitionInfo partitionInfo =
         metadataQueryService.getPartitionsByDsName(
             dataSourceName,
@@ -226,6 +280,18 @@ public Message getPartitionProps(
     if (StringUtils.isBlank(system)) {
       return Message.error("'system' is missing[缺少系统名]");
     }
+    if (!MetadataUtils.nameRegexPattern.matcher(database).matches()) {
+      return Message.error("'database' is invalid[数据库名错误]");
+    }
+    if (!MetadataUtils.nameRegexPattern.matcher(table).matches()) {
+      return Message.error("'table' is invalid[表名错误]");
+    }
+    if (!MetadataUtils.nameRegexPattern.matcher(dataSourceName).matches()) {
+      return Message.error("'dataSourceName' is invalid[数据源错误]");
+    }
+    if (!MetadataUtils.nameRegexPattern.matcher(partition).matches()) {
+      return Message.error("'partition' is invalid[partition错误]");
+    }
     Map partitionProps =
         metadataQueryService.getPartitionPropsByDsName(
             dataSourceName,
@@ -271,6 +337,15 @@ public Message getColumns(
     if (StringUtils.isBlank(system)) {
       return Message.error("'system' is missing[缺少系统名]");
     }
+    if (!MetadataUtils.nameRegexPattern.matcher(database).matches()) {
+      return Message.error("'database' is invalid[数据库名错误]");
+    }
+    if (!MetadataUtils.nameRegexPattern.matcher(table).matches()) {
+      return Message.error("'table' is invalid[表名错误]");
+    }
+    if (!MetadataUtils.nameRegexPattern.matcher(dataSourceName).matches()) {
+      return Message.error("'dataSourceName' is invalid[数据源错误]");
+    }
     List columns =
         metadataQueryService.getColumnsByDsName(
             dataSourceName,
index cba871ef5c5..eedd1a0a3e2 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/service/MetadataQueryService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/service/MetadataQueryService.java @@ -114,6 +114,19 @@ List getColumnsByDsId( String dataSourceId, String database, String table, String system, String userName) throws ErrorException; + /** + * Get connection information + * + * @param dataSourceName data source name + * @param queryParams query params + * @param system system + * @param userName user + * @return + */ + Map getConnectionInfoByDsName( + String dataSourceName, Map queryParams, String system, String userName) + throws ErrorException; + /** * @param dataSourceName data source name * @param system system diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/service/impl/MetadataQueryServiceImpl.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/service/impl/MetadataQueryServiceImpl.java index a4e0a81db82..e6ed0838fa7 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/service/impl/MetadataQueryServiceImpl.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/service/impl/MetadataQueryServiceImpl.java @@ -18,7 +18,9 @@ package org.apache.linkis.metadata.query.server.service.impl; import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.datasourcemanager.common.DataSources; import org.apache.linkis.datasourcemanager.common.auth.AuthContext; +import org.apache.linkis.datasourcemanager.common.domain.DataSource; import org.apache.linkis.datasourcemanager.common.protocol.DsInfoQueryRequest; import org.apache.linkis.datasourcemanager.common.protocol.DsInfoResponse; import org.apache.linkis.metadata.query.common.MdmConfiguration; @@ -196,6 +198,21 @@ public List getDatabasesByDsName(String dataSourceName, String system, S return new ArrayList<>(); } + @Override + public Map getConnectionInfoByDsName( + String dataSourceName, Map queryParams, String system, String userName) + throws ErrorException { + DsInfoResponse dsInfoResponse = queryDataSourceInfoByName(dataSourceName, system, userName); + if (StringUtils.isNotBlank(dsInfoResponse.dsType())) { + return invokeMetaMethod( + dsInfoResponse.dsType(), + "getConnectionInfo", + new Object[] {dsInfoResponse.creator(), dsInfoResponse.params(), queryParams}, + Map.class); + } + return new HashMap<>(); + } + @Override public List getTablesByDsName( String dataSourceName, String database, String system, String userName) @@ -333,8 +350,14 @@ public DsInfoResponse reqToGetDataSourceInfo(String dataSourceId, String system, public DsInfoResponse queryDataSourceInfoByName( String dataSourceName, String system, String userName) throws ErrorException { Object rpcResult = null; + boolean useDefault = false; try { - rpcResult = dataSourceRpcSender.ask(new DsInfoQueryRequest(null, dataSourceName, system)); + rpcResult = reqGetDefaultDataSource(dataSourceName); + if (Objects.isNull(rpcResult)) { + rpcResult = dataSourceRpcSender.ask(new DsInfoQueryRequest(null, dataSourceName, system)); + } else { + 
useDefault = true; + } } catch (Exception e) { throw new ErrorException(-1, "Remote Service Error[远端服务出错, 联系运维处理]"); } @@ -349,7 +372,7 @@ public DsInfoResponse queryDataSourceInfoByName( && userName.equals(response.creator()))); if (!hasPermission) { throw new ErrorException(-1, "Don't have query permission for data source [没有数据源的查询权限]"); - } else if (response.params().isEmpty()) { + } else if (!useDefault && response.params().isEmpty()) { throw new ErrorException(-1, "Have you published the data source? [数据源未发布或者参数为空]"); } return response; @@ -358,6 +381,23 @@ public DsInfoResponse queryDataSourceInfoByName( } } + /** + * Request to get default data source + * + * @param dataSourceName data source name + * @return response + */ + private DsInfoResponse reqGetDefaultDataSource(String dataSourceName) { + DataSource dataSource = DataSources.getDefault(dataSourceName); + return (Objects.nonNull(dataSource)) + ? new DsInfoResponse( + true, + dataSource.getDataSourceType().getName(), + dataSource.getConnectParams(), + dataSource.getCreateUser()) + : null; + } + /** * Invoke method in meta service * diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/utils/MetadataUtils.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/utils/MetadataUtils.java index fefd2923614..fc748ea8486 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/utils/MetadataUtils.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/utils/MetadataUtils.java @@ -17,8 +17,9 @@ package org.apache.linkis.metadata.query.server.utils; +import org.apache.linkis.common.conf.CommonVars; import org.apache.linkis.metadata.query.common.exception.MetaRuntimeException; -import org.apache.linkis.metadata.query.common.service.MetadataService; +import org.apache.linkis.metadata.query.common.service.BaseMetadataService; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; @@ -33,6 +34,7 @@ import java.util.function.Function; import java.util.jar.JarEntry; import java.util.jar.JarFile; +import java.util.regex.Pattern; import java.util.stream.Collectors; import org.slf4j.Logger; @@ -47,8 +49,36 @@ public class MetadataUtils { private static final Logger LOG = LoggerFactory.getLogger(MetadataUtils.class); - public static MetadataService loadMetaService( - Class metaServiceClass, ClassLoader metaServiceClassLoader) { + public static final String NAME_REGEX = + CommonVars.apply("wds.linkis.metadata.query.regex", "^[a-zA-Z\\-\\d_\\.=/:]+$").getValue(); + + public static final Pattern nameRegexPattern = Pattern.compile(NAME_REGEX); + + /** + * Get the primitive class + * + * @param clazz class + * @return return + */ + public static Class getPrimitive(Class clazz) { + try { + Class primitive = null; + if (clazz.isPrimitive()) { + primitive = clazz; + } else { + Class innerType = ((Class) clazz.getField("TYPE").get(null)); + if (innerType.isPrimitive()) { + primitive = innerType; + } + } + return primitive; + } catch (NoSuchFieldException | IllegalAccessException e) { + return null; + } + } + + public static BaseMetadataService loadMetaService( + Class metaServiceClass, ClassLoader metaServiceClassLoader) { ClassLoader storeClassLoader = Thread.currentThread().getContextClassLoader(); 
Thread.currentThread().setContextClassLoader(metaServiceClassLoader); try { @@ -66,7 +96,7 @@ public static MetadataService loadMetaService( // Choose the first one Constructor constructor = acceptConstructor.get(0); try { - return (MetadataService) constructor.newInstance(); + return (BaseMetadataService) constructor.newInstance(); } catch (Exception e) { throw new MetaRuntimeException( "Unable to construct meta service class: [" + metaServiceClass.getName() + "]", e); @@ -103,13 +133,14 @@ public static String[] searchMetaServiceClassInLoader(URLClassLoader serviceClas return classNameList.toArray(new String[] {}); } - public static Class loadMetaServiceClass( + public static Class loadMetaServiceClass( ClassLoader classLoader, String className, boolean initialize, String notFoundMessage) { // Try to load use expectClassName try { - return Class.forName(className, initialize, classLoader).asSubclass(MetadataService.class); + return Class.forName(className, initialize, classLoader) + .asSubclass(BaseMetadataService.class); } catch (ClassNotFoundException ne) { - LOG.warn(notFoundMessage, ne); + LOG.warn(notFoundMessage); } return null; } @@ -165,6 +196,6 @@ private static boolean isSubMetaServiceClass(String className, ClassLoader servi LOG.trace("Class: {} can not be found", className, t); return false; } - return MetadataService.class.isAssignableFrom(clazz); + return BaseMetadataService.class.isAssignableFrom(clazz); } } diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/test/java/org/apache/linkis/metadata/query/server/loader/MetaClassLoaderManagerTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/test/java/org/apache/linkis/metadata/query/server/loader/MetaClassLoaderManagerTest.java new file mode 100644 index 00000000000..1bb96542021 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/test/java/org/apache/linkis/metadata/query/server/loader/MetaClassLoaderManagerTest.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.metadata.query.server.loader; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class MetaClassLoaderManagerTest { + + @Test + @DisplayName("constTest") + public void constTest() { + + Integer instanceExpireTimeValue = MetaClassLoaderManager.INSTANCE_EXPIRE_TIME.getValue(); + String libDirValue = MetaClassLoaderManager.LIB_DIR.getValue(); + + Assertions.assertEquals(60, instanceExpireTimeValue.intValue()); + Assertions.assertNotNull(libDirValue); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/test/java/org/apache/linkis/metadata/query/server/receiver/BaseMetaReceiverTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/test/java/org/apache/linkis/metadata/query/server/receiver/BaseMetaReceiverTest.java new file mode 100644 index 00000000000..64acfc6d158 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/test/java/org/apache/linkis/metadata/query/server/receiver/BaseMetaReceiverTest.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
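Reviewer sketch: the NAME_REGEX added to MetadataUtils earlier in this patch (wds.linkis.metadata.query.regex) whitelists the characters allowed in metadata-query identifiers. A pure-JDK sketch of the default pattern's effect, with no Linkis dependency:

import java.util.regex.Pattern;

public class NameRegexSketch {
  public static void main(String[] args) {
    // Same default value as wds.linkis.metadata.query.regex in the patch
    Pattern namePattern = Pattern.compile("^[a-zA-Z\\-\\d_\\.=/:]+$");
    System.out.println(namePattern.matcher("hdfs://ns1/user/hive").matches()); // true
    System.out.println(namePattern.matcher("db name;drop").matches()); // false: space and ';' rejected
  }
}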
+ */ + +package org.apache.linkis.metadata.query.server.receiver; + +import org.apache.linkis.metadata.query.common.protocol.MetadataConnect; +import org.apache.linkis.metadata.query.common.protocol.MetadataResponse; +import org.apache.linkis.metadata.query.server.WebApplicationServer; +import org.apache.linkis.metadata.query.server.service.impl.MetadataQueryServiceImpl; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import java.util.HashMap; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mockito; + +@ExtendWith({SpringExtension.class}) +@AutoConfigureMockMvc +@SpringBootTest(classes = {WebApplicationServer.class}) +public class BaseMetaReceiverTest { + + @Autowired private BaseMetaReceiver baseMetaReceiver; + + @MockBean(name = "metadataQueryServiceImpl") + MetadataQueryServiceImpl metadataQueryServiceImpl; + + @Test + @DisplayName("dealMetadataConnectRequestTest") + public void dealMetadataConnectRequestTest() throws Exception { + + MetadataConnect metadataConnect = new MetadataConnect("mysql", "query", new HashMap<>(), "1"); + Mockito.doNothing() + .when(metadataQueryServiceImpl) + .getConnection(Mockito.anyString(), Mockito.anyString(), Mockito.anyMap()); + MetadataResponse response = baseMetaReceiver.dealMetadataConnectRequest(metadataConnect); + + Assertions.assertTrue(response.status()); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/test/resources/application.properties b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/test/resources/application.properties index 886ae24f3a1..5572769a68d 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/test/resources/application.properties +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/test/resources/application.properties @@ -1,19 +1,17 @@ -/* -* Licensed to the Apache Software Foundation (ASF) under one or more -* contributor license agreements. See the NOTICE file distributed with -* this work for additional information regarding copyright ownership. -* The ASF licenses this file to You under the Apache License, Version 2.0 -* (the "License"); you may not use this file except in compliance with -* the License. You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# #disable eureka discovery client spring.cloud.service-registry.auto-registration.enabled=false diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/pom.xml index c291ef11d33..571375d965a 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/pom.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../../../../pom.xml linkis-metadata-query-service-es diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/assembly/distribution.xml index b5f688dabad..b463956e453 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/assembly/distribution.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/assembly/distribution.xml @@ -16,10 +16,8 @@ ~ limitations under the License. --> - + linkis-metadata-query-service-es zip diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/EsMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/EsMetaService.java index 74bcbb192da..b66a26f010b 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/EsMetaService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/EsMetaService.java @@ -19,14 +19,14 @@ import org.apache.linkis.datasourcemanager.common.util.json.Json; import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; -import org.apache.linkis.metadata.query.common.service.AbstractMetaService; +import org.apache.linkis.metadata.query.common.service.AbstractDbMetaService; import org.apache.linkis.metadata.query.common.service.MetadataConnection; import java.util.List; import java.util.Map; import java.util.stream.Collectors; -public class EsMetaService extends AbstractMetaService { +public class EsMetaService extends AbstractDbMetaService { @Override public MetadataConnection getConnection( String operator, Map params) throws Exception { diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/pom.xml new file mode 100644 index 00000000000..b040afeed70 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/pom.xml @@ -0,0 +1,110 @@ + + + + 4.0.0 + + org.apache.linkis + linkis + 1.3.0 + 
../../../../../pom.xml + + + linkis-metadata-query-service-hdfs + + + UTF-8 + + + + org.apache.linkis + linkis-metadata-query-common + ${project.version} + provided + + + org.apache.linkis + linkis-module + ${project.version} + provided + + + org.apache.linkis + linkis-hadoop-common + ${project.version} + provided + + + org.apache.linkis + linkis-storage + ${project.version} + provided + + + + ${project.artifactId}-${project.version} + + + src/main/resources + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + org.apache.maven.plugins + maven-assembly-plugin + 3.2.0 + false + + false + out + false + false + + src/main/assembly/distribution.xml + + + + + make-assembly + + single + + package + + + src/main/assembly/distribution.xml + + + + + + + + diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/assembly/distribution.xml new file mode 100644 index 00000000000..912fabc9a5e --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/assembly/distribution.xml @@ -0,0 +1,57 @@ + + + + + linkis-metadata-query-service-hdfs + + zip + dir + + false + linkis-metadata-query-service-hdfs + + + + + + lib + true + true + false + false + true + + + + + + ${basedir}/src/main/resources + + + * + + 0777 + conf + unix + + + + + diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsConnection.java new file mode 100644 index 00000000000..8bec4adb90f --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsConnection.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.linkis.metadata.query.service; + +import org.apache.linkis.hadoop.common.utils.HDFSUtils; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; + +import java.io.Closeable; +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Hdfs connection */ +public class HdfsConnection implements Closeable { + + private static final Logger LOG = LoggerFactory.getLogger(HdfsConnection.class); + + /** Hadoop configuration */ + private final Configuration hadoopConf; + + /** File system */ + private final FileSystem fs; + + public HdfsConnection(String scheme, String operator, String clusterLabel, boolean cache) + throws IOException { + // TODO fix the problem of connecting multiple cluster in FSFactory.getFSByLabelAndUser + // Fs fileSystem = FSFactory.getFSByLabelAndUser(scheme, operator, clusterLabel); + hadoopConf = HDFSUtils.getConfigurationByLabel(operator, clusterLabel); + fs = createFileSystem(operator, this.hadoopConf, cache); + } + + public HdfsConnection( + String scheme, String operator, Map configuration, boolean cache) { + if (Objects.nonNull(configuration)) { + hadoopConf = new Configuration(); + configuration.forEach(hadoopConf::set); + } else { + hadoopConf = HDFSUtils.getConfiguration(operator); + } + fs = createFileSystem(operator, this.hadoopConf, cache); + } + + @Override + public void close() throws IOException { + this.fs.close(); + } + + /** + * Get schema value + * + * @return schema + */ + public String getSchema() { + return fs.getScheme(); + } + + /** + * Get hadoop configuration + * + * @return configuration + */ + public Configuration getConfiguration() { + return this.hadoopConf; + } + + /** + * Get file system + * + * @return file system + */ + public FileSystem getFileSystem() { + return this.fs; + } + /** + * Create file system + * + * @param operator operator + * @param hadoopConf hadoop conf + * @param cache cache + * @return file system + */ + private FileSystem createFileSystem(String operator, Configuration hadoopConf, boolean cache) { + if (!cache) { + hadoopConf.set("fs.hdfs.impl.disable.cache", "true"); + } + return HDFSUtils.createFileSystem(operator, hadoopConf); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsMetaService.java new file mode 100644 index 00000000000..4436859039e --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsMetaService.java @@ -0,0 +1,121 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
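Reviewer sketch: HdfsConnection above can be built either from a user-supplied configuration map or from a cluster label resolved through HDFSUtils. A hedged usage sketch of the map-based path (the fs.defaultFS value is illustrative); the connection is Closeable, so try-with-resources applies:

import org.apache.linkis.metadata.query.service.HdfsConnection;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class HdfsConnectionSketch {
  public static void main(String[] args) throws IOException {
    Map<String, String> conf = new HashMap<>();
    conf.put("fs.defaultFS", "hdfs://ns1"); // illustrative endpoint
    // cache=false makes the connection set fs.hdfs.impl.disable.cache,
    // bypassing Hadoop's shared FileSystem cache
    try (HdfsConnection connection = new HdfsConnection("hdfs", "hadoop", conf, false)) {
      System.out.println(connection.getSchema());
    }
  }
}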
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.metadata.query.service; + +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.datasourcemanager.common.util.json.Json; +import org.apache.linkis.hadoop.common.conf.HadoopConf; +import org.apache.linkis.metadata.query.common.exception.MetaRuntimeException; +import org.apache.linkis.metadata.query.common.service.AbstractFsMetaService; +import org.apache.linkis.metadata.query.common.service.MetadataConnection; +import org.apache.linkis.metadata.query.service.conf.ConfigurationUtils; + +import org.apache.commons.lang3.StringUtils; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.*; +import java.util.concurrent.atomic.AtomicReference; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Hdfs meta service */ +public class HdfsMetaService extends AbstractFsMetaService { + + private static final Logger LOG = LoggerFactory.getLogger(HdfsMetaService.class); + + private static final String PARAM_FILTER_RULE = "filter"; + /** Filter rules */ + private static final CommonVars DEFAULT_FILTER_RULES = + CommonVars.apply( + "wds.linkis.server.mdm.service.hadoop.filter.rules", + StringUtils.join( + new String[] { + "fs.defaultFS", + "dfs.nameservices", + "dfs.ha.namenodes.", + "dfs.namenode.rpc-address.", + "dfs.client.failover.proxy.provider." + }, + ",")); + + @Override + public MetadataConnection getConnection( + String creator, Map params) throws Exception { + Map hadoopConf = toMap(params, HdfsParamsMapper.PARAM_HADOOP_CONF.getValue()); + if (Objects.nonNull(hadoopConf) && !hadoopConf.isEmpty()) { + return new MetadataConnection<>( + new HdfsConnection("", creator, hadoopConf, !useCache()), true); + } else { + String clusterLabel = + Optional.ofNullable(toMap(params, "labels")) + .orElse(Collections.emptyMap()) + .get(HdfsParamsMapper.PARAM_HADOOP_LABEL_CLUSTER.getValue()); + LOG.info("Use Hadoop root config directory: " + HadoopConf.hadoopConfDir()); + return new MetadataConnection<>( + new HdfsConnection("", creator, clusterLabel, !useCache()), true); + } + } + + @Override + public Map queryConnectionInfo( + HdfsConnection connection, Map queryParams) { + List filterRules = new ArrayList<>(); + AtomicReference uriReference = new AtomicReference<>(); + Optional.ofNullable(queryParams.get("uri")) + .ifPresent( + uri -> { + try { + uriReference.set(new URI(uri)); + } catch (URISyntaxException e) { + LOG.warn("Unrecognized uri value: [" + uri + "]", e); + } + }); + Optional.ofNullable(queryParams.get(PARAM_FILTER_RULE)) + .ifPresent( + rules -> { + if (StringUtils.isNotBlank(rules)) { + filterRules.addAll(Arrays.asList(rules.split(","))); + } + }); + if (filterRules.isEmpty()) { + filterRules.addAll(Arrays.asList(DEFAULT_FILTER_RULES.getValue().split(","))); + } + return ConfigurationUtils.filterConfiguration( + connection.getFileSystem(), filterRules, uriReference.get()); + } + + @SuppressWarnings("unchecked") + private Map toMap(Map connectParams, String key) { + Map valueMap = new HashMap<>(); + Object mapObj = connectParams.get(key); + if (Objects.nonNull(mapObj)) { + try { + if 
(!(mapObj instanceof Map)) { + valueMap = Json.fromJson(String.valueOf(mapObj), Map.class, String.class, String.class); + } else { + valueMap = (Map) mapObj; + } + } catch (Exception e) { + throw new MetaRuntimeException("Cannot parse the param:[" + key + "]", e); + } + } + return valueMap; + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsParamsMapper.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsParamsMapper.java new file mode 100644 index 00000000000..56e7efc6609 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsParamsMapper.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.metadata.query.service; + +import org.apache.linkis.common.conf.CommonVars; + +public class HdfsParamsMapper { + + public static final CommonVars PARAM_HADOOP_CONF = + CommonVars.apply("wds.linkis.server.mdm.service.hadoop.conf", "hadoopConf"); + + public static final CommonVars PARAM_HADOOP_LABEL_CLUSTER = + CommonVars.apply("wds.linkis.server.mdm.service.hadoop.label.cluster", "hadoopCluster"); +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/conf/ConfigurationUtils.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/conf/ConfigurationUtils.java new file mode 100644 index 00000000000..9a1d49c60c5 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/conf/ConfigurationUtils.java @@ -0,0 +1,141 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
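Reviewer sketch: queryConnectionInfo in HdfsMetaService above recognizes two optional query parameters, "filter" (comma-separated rule overrides) and "uri" (narrows viewfs mount-table entries). A sketch of a parameter map a client might send; the values are illustrative:

import java.util.HashMap;
import java.util.Map;

public class HdfsQueryParamsSketch {
  public static void main(String[] args) {
    Map<String, String> queryParams = new HashMap<>();
    // Overrides the wds.linkis.server.mdm.service.hadoop.filter.rules default for this request
    queryParams.put("filter", "fs.defaultFS,dfs.nameservices");
    // Optional: restrict viewfs mount-table entries to this path
    queryParams.put("uri", "viewfs://cluster/user/hive");
    System.out.println(queryParams);
  }
}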
+ */ + +package org.apache.linkis.metadata.query.service.conf; + +import org.apache.linkis.common.conf.CommonVars; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.viewfs.Constants; +import org.apache.hadoop.fs.viewfs.ViewFileSystem; + +import java.net.URI; +import java.util.*; +import java.util.function.Function; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** Utils to deal with configuration */ +public class ConfigurationUtils { + + private static final CommonVars CONFIG_VIEWFS_LINK_FALLBACK = + CommonVars.apply("wds.linkis.server.mdm.hadoop.conf.link.fallback", "linkFallback"); + + private static final CommonVars CONFIG_VIEWFS_LINK_NFLY = + CommonVars.apply("wds.linkis.server.mdm.hadoop.conf.link.nfly", "linkNfly"); + /** Placeholder */ + private static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("<[^>]*?>"); + + private static final Function FILTER_ALL_CONFIG = config -> false; + + /** + * Filter configuration + * + * @param fileSystem file system + * @param filterRules filter rules + * @param uri uri + * @return props + */ + public static Map filterConfiguration( + FileSystem fileSystem, List filterRules, URI uri) { + Map filteredProps = new HashMap<>(); + Configuration hadoopConf = fileSystem.getConf(); + List rules = + Objects.isNull(filterRules) ? new ArrayList<>() : new ArrayList<>(filterRules); + Function acceptableFunction = FILTER_ALL_CONFIG; + if (fileSystem instanceof ViewFileSystem) { + acceptableFunction = addViewFileSystemFilterRules(fileSystem, rules, uri); + } + Pattern pattern = rulesToPattern(rules); + Function finalAcceptableFunction = acceptableFunction; + hadoopConf.forEach( + entry -> { + String key = entry.getKey(); + if (pattern.matcher(key).matches() || finalAcceptableFunction.apply(key)) { + filteredProps.put(key, entry.getValue()); + } + }); + return filteredProps; + } + + /** + * Filter rules to pattern + * + * @param filterRules filter rules + * @return pattern + */ + private static Pattern rulesToPattern(List filterRules) { + StringBuffer sb = new StringBuffer("^("); + for (int i = 0; i < filterRules.size(); i++) { + String rule = filterRules.get(i); + if (StringUtils.isNotBlank(rule)) { + Matcher matcher = PLACEHOLDER_PATTERN.matcher(rule); + while (matcher.find()) { + matcher.appendReplacement(sb, "[\\\\s\\\\S]*?"); + } + matcher.appendTail(sb); + if (i < filterRules.size() - 1) { + sb.append("|"); + } + } + } + sb.append(")$"); + return Pattern.compile(sb.toString().replace(".", "[.]")); + } + + /** + * Add filter rules for view FileSystem + * + * @param filerRules filter rules + * @param uri uri + * @return filter function + */ + private static Function addViewFileSystemFilterRules( + FileSystem fileSystem, List filerRules, URI uri) { + String mountTableName = + Optional.ofNullable(uri) + .orElse(FileSystem.getDefaultUri(fileSystem.getConf())) + .getAuthority(); + if (StringUtils.isBlank(mountTableName)) { + mountTableName = Constants.CONFIG_VIEWFS_DEFAULT_MOUNT_TABLE; + } + if (Objects.nonNull(uri) && fileSystem.getScheme().equals(uri.getScheme())) { + // Just load the default mountable configuration + String linkPrefix = Constants.CONFIG_VIEWFS_PREFIX + "."
+ mountTableName + "."; + String linkBasicPrefix = linkPrefix + Constants.CONFIG_VIEWFS_LINK; + String linkMergePrefix = linkPrefix + Constants.CONFIG_VIEWFS_LINK_MERGE; + // linkFallback, linkNfly in HADOOP-13055 + filerRules.add(linkPrefix + Constants.CONFIG_VIEWFS_LINK_MERGE_SLASH); + filerRules.add(linkPrefix + CONFIG_VIEWFS_LINK_FALLBACK.getValue()); + filerRules.add(linkPrefix + CONFIG_VIEWFS_LINK_NFLY.getValue()); + filerRules.add(linkPrefix + Constants.CONFIG_VIEWFS_HOMEDIR); + String path = uri.getPath(); + return config -> + (config.startsWith(linkBasicPrefix) + && path.startsWith(config.substring(linkBasicPrefix.length() + 1))) + || (config.startsWith(linkMergePrefix) + && path.startsWith(config.substring(linkMergePrefix.length() + 1))); + } else { + // Load in all the mountable configuration + filerRules.add(Constants.CONFIG_VIEWFS_PREFIX + "." + mountTableName + "."); + } + return FILTER_ALL_CONFIG; + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml index ed3095fc05c..b39cd489f18 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../../../../pom.xml linkis-metadata-query-service-hive diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/assembly/distribution.xml index 54693f8f800..e2e2b5d0fda 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/assembly/distribution.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/assembly/distribution.xml @@ -16,10 +16,8 @@ ~ limitations under the License. 
--> - + linkis-metadata-query-service-hive zip diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveMetaService.java index 5a506373f77..4fc85e6299d 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveMetaService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveMetaService.java @@ -25,7 +25,7 @@ import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; import org.apache.linkis.metadata.query.common.domain.MetaPartitionInfo; import org.apache.linkis.metadata.query.common.exception.MetaRuntimeException; -import org.apache.linkis.metadata.query.common.service.AbstractMetaService; +import org.apache.linkis.metadata.query.common.service.AbstractDbMetaService; import org.apache.linkis.metadata.query.common.service.MetadataConnection; import org.apache.commons.io.FileUtils; @@ -46,7 +46,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class HiveMetaService extends AbstractMetaService { +public class HiveMetaService extends AbstractDbMetaService { private static final Logger LOG = LoggerFactory.getLogger(HiveMetaService.class); private static final CommonVars TMP_FILE_STORE_LOCATION = diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/pom.xml index 2ae6eed48d6..7705ea69b7f 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/pom.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../../../../pom.xml linkis-metadata-query-service-kafka diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/assembly/distribution.xml index 997518b487c..4181b7b3f28 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/assembly/distribution.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/assembly/distribution.xml @@ -16,10 +16,8 @@ ~ limitations under the License. 
--> - + linkis-metadata-query-service-kafka zip diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaMetaService.java index a282ecb6169..d1e30ac88f4 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaMetaService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaMetaService.java @@ -22,7 +22,7 @@ import org.apache.linkis.bml.protocol.BmlDownloadResponse; import org.apache.linkis.common.conf.CommonVars; import org.apache.linkis.metadata.query.common.exception.MetaRuntimeException; -import org.apache.linkis.metadata.query.common.service.AbstractMetaService; +import org.apache.linkis.metadata.query.common.service.AbstractDbMetaService; import org.apache.linkis.metadata.query.common.service.MetadataConnection; import org.apache.commons.io.FileUtils; @@ -44,7 +44,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class KafkaMetaService extends AbstractMetaService { +public class KafkaMetaService extends AbstractDbMetaService { private static final Logger LOG = LoggerFactory.getLogger(KafkaMetaService.class); private static final CommonVars TMP_FILE_STORE_LOCATION = diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/pom.xml index 1528a715422..33982bfda81 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/pom.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../../../../pom.xml linkis-metadata-query-service-mysql diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/assembly/distribution.xml index dab81a3665b..67746c06e97 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/assembly/distribution.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/assembly/distribution.xml @@ -16,10 +16,8 @@ ~ limitations under the License. 
--> - + linkis-metadata-query-service-mysql zip diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/Db2MetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/Db2MetaService.java index f92cb873828..81089930312 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/Db2MetaService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/Db2MetaService.java @@ -19,7 +19,7 @@ import org.apache.linkis.datasourcemanager.common.util.json.Json; import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; -import org.apache.linkis.metadata.query.common.service.AbstractMetaService; +import org.apache.linkis.metadata.query.common.service.AbstractDbMetaService; import org.apache.linkis.metadata.query.common.service.MetadataConnection; import org.apache.linkis.metadata.query.service.conf.SqlParamsMapper; import org.apache.linkis.metadata.query.service.db2.SqlConnection; @@ -32,7 +32,7 @@ import java.util.Map; @Component -public class Db2MetaService extends AbstractMetaService { +public class Db2MetaService extends AbstractDbMetaService { @Override public MetadataConnection getConnection( String operator, Map params) throws Exception { diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/DmMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/DmMetaService.java index 464f37a85a2..eedc65080b9 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/DmMetaService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/DmMetaService.java @@ -19,7 +19,7 @@ import org.apache.linkis.datasourcemanager.common.util.json.Json; import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; -import org.apache.linkis.metadata.query.common.service.AbstractMetaService; +import org.apache.linkis.metadata.query.common.service.AbstractDbMetaService; import org.apache.linkis.metadata.query.common.service.MetadataConnection; import org.apache.linkis.metadata.query.service.conf.SqlParamsMapper; import org.apache.linkis.metadata.query.service.dm.SqlConnection; @@ -29,7 +29,7 @@ import java.util.List; import java.util.Map; -public class DmMetaService extends AbstractMetaService { +public class DmMetaService extends AbstractDbMetaService { @Override public MetadataConnection getConnection( String operator, Map params) throws Exception { diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/GreenplumMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/GreenplumMetaService.java index f882d2a71ff..646ea73ad80 100644 --- 
a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/GreenplumMetaService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/GreenplumMetaService.java @@ -19,7 +19,7 @@ import org.apache.linkis.datasourcemanager.common.util.json.Json; import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; -import org.apache.linkis.metadata.query.common.service.AbstractMetaService; +import org.apache.linkis.metadata.query.common.service.AbstractDbMetaService; import org.apache.linkis.metadata.query.common.service.MetadataConnection; import org.apache.linkis.metadata.query.service.conf.SqlParamsMapper; import org.apache.linkis.metadata.query.service.greenplum.SqlConnection; @@ -31,7 +31,7 @@ import java.util.List; import java.util.Map; -public class GreenplumMetaService extends AbstractMetaService { +public class GreenplumMetaService extends AbstractDbMetaService { @Override public MetadataConnection getConnection( String operator, Map params) throws Exception { diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/KingbaseMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/KingbaseMetaService.java index c65d846e844..4e4fe41901b 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/KingbaseMetaService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/KingbaseMetaService.java @@ -19,7 +19,7 @@ import org.apache.linkis.datasourcemanager.common.util.json.Json; import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; -import org.apache.linkis.metadata.query.common.service.AbstractMetaService; +import org.apache.linkis.metadata.query.common.service.AbstractDbMetaService; import org.apache.linkis.metadata.query.common.service.MetadataConnection; import org.apache.linkis.metadata.query.service.conf.SqlParamsMapper; import org.apache.linkis.metadata.query.service.kingbase.SqlConnection; @@ -29,7 +29,7 @@ import java.util.List; import java.util.Map; -public class KingbaseMetaService extends AbstractMetaService { +public class KingbaseMetaService extends AbstractDbMetaService { @Override public MetadataConnection getConnection( diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/MysqlMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/MysqlMetaService.java index 7b125bae4f3..325a8d0fbd2 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/MysqlMetaService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/MysqlMetaService.java @@ -19,7 +19,7 @@ import org.apache.linkis.datasourcemanager.common.util.json.Json; import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; -import 
org.apache.linkis.metadata.query.common.service.AbstractMetaService; +import org.apache.linkis.metadata.query.common.service.AbstractDbMetaService; import org.apache.linkis.metadata.query.common.service.MetadataConnection; import org.apache.linkis.metadata.query.service.conf.SqlParamsMapper; import org.apache.linkis.metadata.query.service.mysql.SqlConnection; @@ -31,7 +31,7 @@ import java.util.List; import java.util.Map; -public class MysqlMetaService extends AbstractMetaService { +public class MysqlMetaService extends AbstractDbMetaService { @Override public MetadataConnection getConnection( String operator, Map params) throws Exception { diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/OracleMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/OracleMetaService.java index 15b36d7ab97..2d23542bd04 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/OracleMetaService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/OracleMetaService.java @@ -19,7 +19,7 @@ import org.apache.linkis.datasourcemanager.common.util.json.Json; import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; -import org.apache.linkis.metadata.query.common.service.AbstractMetaService; +import org.apache.linkis.metadata.query.common.service.AbstractDbMetaService; import org.apache.linkis.metadata.query.common.service.MetadataConnection; import org.apache.linkis.metadata.query.service.conf.SqlParamsMapper; import org.apache.linkis.metadata.query.service.oracle.SqlConnection; @@ -32,7 +32,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class OracleMetaService extends AbstractMetaService { +public class OracleMetaService extends AbstractDbMetaService { private static final Logger LOG = LoggerFactory.getLogger(OracleMetaService.class); @Override diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/PostgresqlMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/PostgresqlMetaService.java index 7dbe633e724..134ddb341bd 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/PostgresqlMetaService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/PostgresqlMetaService.java @@ -19,7 +19,7 @@ import org.apache.linkis.datasourcemanager.common.util.json.Json; import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; -import org.apache.linkis.metadata.query.common.service.AbstractMetaService; +import org.apache.linkis.metadata.query.common.service.AbstractDbMetaService; import org.apache.linkis.metadata.query.common.service.MetadataConnection; import org.apache.linkis.metadata.query.service.conf.SqlParamsMapper; import org.apache.linkis.metadata.query.service.postgres.SqlConnection; @@ -31,7 +31,7 @@ import java.util.List; import java.util.Map; -public class PostgresqlMetaService extends AbstractMetaService { +public 
class PostgresqlMetaService extends AbstractDbMetaService { @Override public MetadataConnection getConnection( String operator, Map params) throws Exception { diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/SqlserverMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/SqlserverMetaService.java index 9225a0d0bd6..3d9b4874259 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/SqlserverMetaService.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mysql/src/main/java/org/apache/linkis/metadata/query/service/SqlserverMetaService.java @@ -19,7 +19,7 @@ import org.apache.linkis.datasourcemanager.common.util.json.Json; import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; -import org.apache.linkis.metadata.query.common.service.AbstractMetaService; +import org.apache.linkis.metadata.query.common.service.AbstractDbMetaService; import org.apache.linkis.metadata.query.common.service.MetadataConnection; import org.apache.linkis.metadata.query.service.conf.SqlParamsMapper; import org.apache.linkis.metadata.query.service.sqlserver.SqlConnection; @@ -29,7 +29,7 @@ import java.util.List; import java.util.Map; -public class SqlserverMetaService extends AbstractMetaService { +public class SqlserverMetaService extends AbstractDbMetaService { @Override public MetadataConnection getConnection( String operator, Map params) throws Exception { diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml index 5567537d221..69bde0696a4 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../../pom.xml linkis-metadata diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/assembly/distribution.xml index 2eaa687143f..0d872b4972d 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/assembly/distribution.xml +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - + + linkis-metadata dir diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/errorcode/LinkisMetadataErrorCodeSummary.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/errorcode/LinkisMetadataErrorCodeSummary.java new file mode 100644 index 00000000000..305abf66cb7 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/errorcode/LinkisMetadataErrorCodeSummary.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.metadata.errorcode; + +public enum LinkisMetadataErrorCodeSummary { + UNRECOGNIZED_IMPORT_TYPE( + 57895, "unrecognized import type(无法识别的导入类型)", "unrecognized import type(无法识别的导入类型)"), + IMPORT_HIVE_SOURCE_IS_NULL( + 57895, "import hive source is null(导入配置单元源为空)", "import hive source is null(导入配置单元源为空)"), + HIVE_CREATE_IS_NULL( + 57895, + "Hive create table destination database or tablename is null(Hive 创建表目标数据库或表名为空)", + "Hive create table destination database or tablename is null(Hive 创建表目标数据库或表名为空)"), + HIVE_CREATE_TABLE_IS_NULL( + 57895, + "hive create table source table name is null(hive 创建表源表名为空)", + "hive create table source table name is null(hive 创建表源表名为空)"), + PARTITION_IS_NULL( + 57895, + "partition name or type is null(分区名称或类型为空)", + "partition name or type is null(分区名称或类型为空)"), + EXPRESS_CODE(57895, "", ""); + + /** (errorCode)错误码 */ + private int errorCode; + /** (errorDesc)错误描述 */ + private String errorDesc; + /** Possible reasons for the error(错误可能出现的原因) */ + private String comment; + + LinkisMetadataErrorCodeSummary(int errorCode, String errorDesc, String comment) { + this.errorCode = errorCode; + this.errorDesc = errorDesc; + this.comment = comment; + } + + public int getErrorCode() { + return errorCode; + } + + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + @Override + public String toString() { + return "errorCode: " + this.errorCode + ", errorDesc: " + this.errorDesc; + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ImportDDLCreator.scala b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ImportDDLCreator.scala index 9992269409e..7e7d464b0b5 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ImportDDLCreator.scala +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ImportDDLCreator.scala @@ -22,6 +22,7 @@ import org.apache.linkis.common.io.FsPath import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.metadata.conf.MdqConfiguration import org.apache.linkis.metadata.domain.mdq.bo.{MdqTableBO, MdqTableFieldsInfoBO} +import org.apache.linkis.metadata.errorcode.LinkisMetadataErrorCodeSummary._ import org.apache.linkis.metadata.exception.MdqIllegalParamException import org.apache.linkis.storage.FSFactory import org.apache.linkis.storage.fs.FileSystem @@ -74,13 +75,13 @@ object FileImportDDLHelper extends ImportHelper with Logging { val args = importInfo.getArgs val _source = if
(StringUtils.isEmpty(importInfo.getSource)) { - throw MdqIllegalParamException("import hive source is null") + throw MdqIllegalParamException(IMPORT_HIVE_SOURCE_IS_NULL.getErrorDesc) } else { importInfo.getSource } val _destination = if (StringUtils.isEmpty(importInfo.getDestination)) { - throw MdqIllegalParamException("import hive source is null") + throw MdqIllegalParamException(IMPORT_HIVE_SOURCE_IS_NULL.getErrorDesc) } else { importInfo.getDestination } @@ -173,17 +174,17 @@ object HiveImportDDLHelper extends ImportHelper with SQLConst with Logging { val destinationTable = mdqTableBO.getTableBaseInfo.getBase.getName if (StringUtils.isEmpty(destinationDatabase) || StringUtils.isEmpty(destinationTable)) { logger.error("Hive create table destination database or tablename is null") - throw MdqIllegalParamException("Hive create table destination database or tablename is null") + throw MdqIllegalParamException(HIVE_CREATE_IS_NULL.getErrorDesc) } val sourceDatabase = if (StringUtils.isEmpty(args.get(DATABASE))) { - throw MdqIllegalParamException("hive create table source database is null") + throw MdqIllegalParamException(HIVE_CREATE_TABLE_IS_NULL.getErrorDesc) } else { args.get(DATABASE) } val sourceTableName = if (StringUtils.isEmpty(args.get(TABLE))) { - throw MdqIllegalParamException("hive create table source table name is null") + throw MdqIllegalParamException(HIVE_CREATE_TABLE_IS_NULL.getErrorDesc) } else { args.get(TABLE) } diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ScalaDDLCreator.scala b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ScalaDDLCreator.scala index 439826482a9..dd2bda9574f 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ScalaDDLCreator.scala +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ScalaDDLCreator.scala @@ -20,6 +20,7 @@ package org.apache.linkis.metadata.ddl import org.apache.linkis.common.utils.Logging import org.apache.linkis.metadata.conf.MdqConfiguration import org.apache.linkis.metadata.domain.mdq.bo.{MdqTableBO, MdqTableFieldsInfoBO} +import org.apache.linkis.metadata.errorcode.LinkisMetadataErrorCodeSummary.PARTITION_IS_NULL import org.apache.linkis.metadata.exception.MdqIllegalParamException import org.apache.commons.lang3.StringUtils @@ -60,7 +61,7 @@ object ScalaDDLCreator extends DDLCreator with SQLConst with Logging { val name = p.getName val _type = p.getType if (StringUtils.isEmpty(name) || StringUtils.isEmpty(_type)) { - throw MdqIllegalParamException("partition name or type is null") + throw MdqIllegalParamException(PARTITION_IS_NULL.getErrorDesc) } partitionArr += (name + SPACE + _type) } diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/exception/MdqIllegalParamException.scala b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/exception/MdqIllegalParamException.scala index 8a1620d41a0..d22fb745151 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/exception/MdqIllegalParamException.scala +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/exception/MdqIllegalParamException.scala @@ -18,5 +18,7 @@ package
org.apache.linkis.metadata.exception import org.apache.linkis.common.exception.ErrorException +import org.apache.linkis.metadata.errorcode.LinkisMetadataErrorCodeSummary.EXPRESS_CODE -case class MdqIllegalParamException(errMsg: String) extends ErrorException(57895, errMsg) +case class MdqIllegalParamException(errMsg: String) + extends ErrorException(EXPRESS_CODE.getErrorCode, errMsg) diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/Scan.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/Scan.java new file mode 100644 index 00000000000..485e1b6f91e --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/Scan.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.metadata; + +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; + +import org.mybatis.spring.annotation.MapperScan; + +@EnableAutoConfiguration +@MapperScan( + basePackages = {"org.apache.linkis.metadata.dao", "org.apache.linkis.metadata.hive.dao"}) +public class Scan {} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/WebApplicationServer.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/WebApplicationServer.java new file mode 100644 index 00000000000..2f84ec2c331 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/WebApplicationServer.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
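Reviewer sketch: with the codes centralized in LinkisMetadataErrorCodeSummary, call sites raise MdqIllegalParamException from the enum text instead of ad-hoc strings, as the Scala changes above do. The same pattern from the Java side, sketched:

import org.apache.linkis.metadata.exception.MdqIllegalParamException;

import static org.apache.linkis.metadata.errorcode.LinkisMetadataErrorCodeSummary.PARTITION_IS_NULL;

public class ErrorCodeUsageSketch {
  static void checkPartition(String name, String type) throws MdqIllegalParamException {
    if (name == null || name.isEmpty() || type == null || type.isEmpty()) {
      // The enum supplies the bilingual message; the 57895 code comes from EXPRESS_CODE
      throw new MdqIllegalParamException(PARTITION_IS_NULL.getErrorDesc());
    }
  }
}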
+ */ + +package org.apache.linkis.metadata; + +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.boot.web.servlet.ServletComponentScan; +import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; +import org.springframework.context.annotation.ComponentScan; + +@EnableAutoConfiguration +@ServletComponentScan +@ComponentScan +public class WebApplicationServer extends SpringBootServletInitializer { + + public static void main(String[] args) { + new SpringApplicationBuilder(WebApplicationServer.class).run(args); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/condition/DataSourceConditionTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/condition/DataSourceConditionTest.java new file mode 100644 index 00000000000..fcc432c9469 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/condition/DataSourceConditionTest.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.metadata.condition; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class DataSourceConditionTest { + + @Test + @DisplayName("matchesTest") + public void matchesTest() { + + boolean matches = new DataSourceCondition().matches(null, null); + Assertions.assertTrue(matches); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/dao/BaseDaoTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/dao/BaseDaoTest.java new file mode 100644 index 00000000000..d1e08215c47 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/dao/BaseDaoTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.metadata.dao; + +import org.apache.linkis.metadata.Scan; + +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; +import org.springframework.transaction.annotation.EnableTransactionManagement; +import org.springframework.transaction.annotation.Transactional; + +@SpringBootTest(classes = Scan.class) +@Transactional +@Rollback(true) +@EnableTransactionManagement +public abstract class BaseDaoTest {} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/dao/MdqDaoTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/dao/MdqDaoTest.java new file mode 100644 index 00000000000..56da4160a96 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/dao/MdqDaoTest.java @@ -0,0 +1,157 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
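Context note: BaseDaoTest above is the shared harness for the DAO tests that follow. @SpringBootTest boots the mybatis Scan context against the in-memory H2 database configured in src/test/resources, and @Transactional plus @Rollback undo each test's writes. A hypothetical subclass illustrating the contract (MdqDao, selectTableForUpdate, and the fixture row all come from elsewhere in this diff; the class itself is not part of the PR):

package org.apache.linkis.metadata.dao;

import org.springframework.beans.factory.annotation.Autowired;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

// Illustrative only: any mapper in Scan's @MapperScan packages can be
// autowired, and every @Test runs in a transaction that is rolled back, so
// the create.sql fixture data stays intact between tests.
public class ExampleDaoTest extends BaseDaoTest {

  @Autowired private MdqDao mdqDao;

  @Test
  public void fixtureRowIsVisible() {
    // "ods_user_md_ind"/"t_student_temp" is the row seeded by create.sql in this diff.
    Assertions.assertNotNull(mdqDao.selectTableForUpdate("ods_user_md_ind", "t_student_temp"));
  }
}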
+ */
+
+package org.apache.linkis.metadata.dao;
+
+import org.apache.linkis.metadata.domain.mdq.po.MdqField;
+import org.apache.linkis.metadata.domain.mdq.po.MdqImport;
+import org.apache.linkis.metadata.domain.mdq.po.MdqLineage;
+import org.apache.linkis.metadata.domain.mdq.po.MdqTable;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+public class MdqDaoTest extends BaseDaoTest {
+
+  @Autowired private MdqDao mdqDao;
+
+  private MdqTable createMdpTable() {
+    MdqTable table = new MdqTable();
+    table.setDatabase("hadoop_ind");
+    table.setName("t_user");
+    table.setAlias("t_user");
+    table.setCreator("hadoop");
+    table.setComment("test create");
+    table.setCreateTime(new Date());
+    table.setUsage("test");
+    table.setLifecycle(0);
+    table.setUseWay(0);
+    table.setImport(false);
+    table.setModelLevel(0);
+    table.setPartitionTable(false);
+    table.setModelLevel(0);
+    table.setAvailable(true);
+    return table;
+  }
+
+  private MdqField createMdpField() {
+    MdqField mdqField = new MdqField();
+    mdqField.setTableId(1L);
+    mdqField.setName("name");
+    mdqField.setType("string");
+    mdqField.setComment("姓名字段");
+    mdqField.setPartitionField(false);
+    mdqField.setPrimary(false);
+    mdqField.setLength(255);
+    return mdqField;
+  }
+
+  @Test
+  @DisplayName("activateTableTest")
+  public void activateTableTest() {
+
+    mdqDao.activateTable(1L);
+    MdqTable mdqTable = mdqDao.selectTableForUpdate("ods_user_md_ind", "t_student_temp");
+    Assertions.assertTrue(mdqTable.getAvailable().booleanValue());
+  }
+
+  @Test
+  @DisplayName("selectTableByNameTest")
+  public void selectTableByNameTest() {
+    MdqTable mdqTable = mdqDao.selectTableByName("ods_user_md_ind", "t_student_temp", "hadoop");
+    Assertions.assertNotNull(mdqTable);
+  }
+
+  @Test
+  @DisplayName("listMdqFieldByTableIdTest")
+  public void listMdqFieldByTableIdTest() {
+
+    List<MdqField> mdqFields = mdqDao.listMdqFieldByTableId(1L);
+    Assertions.assertTrue(mdqFields.size() > 0);
+  }
+
+  @Test
+  @DisplayName("insertTableTest")
+  public void insertTableTest() {
+    MdqTable mdpTable = createMdpTable();
+    mdqDao.insertTable(mdpTable);
+    MdqTable mdqTableDao =
+        mdqDao.selectTableByName(mdpTable.getDatabase(), mdpTable.getName(), mdpTable.getCreator());
+    Assertions.assertNotNull(mdqTableDao);
+  }
+
+  @Test
+  @DisplayName("insertImportTest")
+  public void insertImportTest() {
+
+    Assertions.assertAll(
+        () -> {
+          MdqImport mdqImport = new MdqImport();
+          mdqImport.setTableId(1L);
+          mdqImport.setArgs("name");
+          mdqImport.setImportType(0);
+          mdqDao.insertImport(mdqImport);
+        });
+  }
+
+  @Test
+  @DisplayName("insertLineageTest")
+  public void insertLineageTest() {
+
+    Assertions.assertAll(
+        () -> {
+          MdqLineage mdqLineage = new MdqLineage();
+          mdqLineage.setTableId(1L);
+          mdqLineage.setSourceTable("hadoop_ind");
+          mdqLineage.setUpdateTime(new Date());
+          mdqDao.insertLineage(mdqLineage);
+        });
+  }
+
+  @Test
+  @DisplayName("selectTableForUpdateTest")
+  public void selectTableForUpdateTest() {
+    MdqTable mdqTable = mdqDao.selectTableForUpdate("ods_user_md_ind", "t_student_temp");
+    Assertions.assertNotNull(mdqTable);
+  }
+
+  @Test
+  @DisplayName("deleteTableBaseInfoTest")
+  public void deleteTableBaseInfoTest() {
+
+    mdqDao.deleteTableBaseInfo(1L);
+    MdqTable mdqTable = mdqDao.selectTableForUpdate("ods_user_md_ind", "t_student_temp");
+    Assertions.assertNull(mdqTable);
+  }
+
+  @Test
+  @DisplayName("insertFieldsTest")
+  public void insertFieldsTest() {
+    List<MdqField> list = new ArrayList<>(Arrays.asList(createMdpField()));
+    mdqDao.insertFields(list);
+    List<MdqField> mdqFields = mdqDao.listMdqFieldByTableId(1L);
+    Assertions.assertTrue(mdqFields.size() > 0);
+  }
+}
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/errorcode/LinkisMetadataErrorCodeSummaryTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/errorcode/LinkisMetadataErrorCodeSummaryTest.java
new file mode 100644
index 00000000000..77a98c0636a
--- /dev/null
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/errorcode/LinkisMetadataErrorCodeSummaryTest.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.metadata.errorcode;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+public class LinkisMetadataErrorCodeSummaryTest {
+
+  @Test
+  @DisplayName("enumTest")
+  public void enumTest() {
+
+    int unrecognizedCode = LinkisMetadataErrorCodeSummary.UNRECOGNIZED_IMPORT_TYPE.getErrorCode();
+    int importHiveCode = LinkisMetadataErrorCodeSummary.IMPORT_HIVE_SOURCE_IS_NULL.getErrorCode();
+    int hiveCreateNullCode = LinkisMetadataErrorCodeSummary.HIVE_CREATE_IS_NULL.getErrorCode();
+    int hiveCreateTableNullCode =
+        LinkisMetadataErrorCodeSummary.HIVE_CREATE__TABLE_IS_NULL.getErrorCode();
+    int partitionCode = LinkisMetadataErrorCodeSummary.PARTITION_IS_NULL.getErrorCode();
+    int expressCode = LinkisMetadataErrorCodeSummary.EXPRESS_CODE.getErrorCode();
+
+    Assertions.assertTrue(57895 == unrecognizedCode);
+    Assertions.assertTrue(57895 == importHiveCode);
+    Assertions.assertTrue(57895 == hiveCreateNullCode);
+    Assertions.assertTrue(57895 == hiveCreateTableNullCode);
+    Assertions.assertTrue(57895 == partitionCode);
+    Assertions.assertTrue(57895 == expressCode);
+  }
+}
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/hive/dao/HiveMetaDaoTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/hive/dao/HiveMetaDaoTest.java
new file mode 100644
index 00000000000..87e072d35e9
--- /dev/null
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/hive/dao/HiveMetaDaoTest.java
@@ -0,0 +1,161 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.metadata.hive.dao;
+
+import org.apache.linkis.metadata.dao.BaseDaoTest;
+import org.apache.linkis.metadata.hive.dto.MetadataQueryParam;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+public class HiveMetaDaoTest extends BaseDaoTest {
+
+  @Autowired private HiveMetaDao hiveMetaDao;
+
+  @Test
+  @DisplayName("getLocationByDbAndTableTest")
+  public void getLocationByDbAndTableTest() {
+
+    MetadataQueryParam queryParam = new MetadataQueryParam();
+    queryParam.setDbName("default");
+    queryParam.setTableName("employee");
+    String location = hiveMetaDao.getLocationByDbAndTable(queryParam);
+    Assertions.assertNotNull(location);
+  }
+
+  @Test
+  @DisplayName("getAllDbsTest")
+  public void getAllDbsTest() {
+
+    List<String> dbs = hiveMetaDao.getAllDbs();
+
+    Assertions.assertTrue(dbs.size() > 0);
+  }
+
+  @Test
+  @DisplayName("getRolesByUserTest")
+  public void getRolesByUserTest() {
+
+    List<String> roles = hiveMetaDao.getRolesByUser("admin");
+    Assertions.assertTrue(roles.size() == 0);
+  }
+
+  @Test
+  @DisplayName("getDbsByUserAndRolesTest")
+  public void getDbsByUserAndRolesTest() {
+
+    List<String> dbs = hiveMetaDao.getDbsByUserAndRoles("admin", new ArrayList<>());
+    Assertions.assertTrue(dbs.size() == 0);
+  }
+
+  @Test
+  @DisplayName("getTablesByDbNameAndUserAndRolesTest")
+  public void getTablesByDbNameAndUserAndRolesTest() {
+    MetadataQueryParam queryParam = new MetadataQueryParam();
+    queryParam.setDbName("default");
+    queryParam.setTableName("employee");
+    queryParam.setUserName("admin");
+    List<Map<String, Object>> tables = hiveMetaDao.getTablesByDbNameAndUserAndRoles(queryParam);
+    Assertions.assertTrue(tables.size() == 0);
+  }
+
+  @Test
+  @DisplayName("getTablesByDbNameTest")
+  public void getTablesByDbNameTest() {
+
+    MetadataQueryParam queryParam = new MetadataQueryParam();
+    queryParam.setDbName("default");
+    List<Map<String, Object>> tables = hiveMetaDao.getTablesByDbName(queryParam);
+    Assertions.assertTrue(tables.size() == 1);
+  }
+
+  @Test
+  @DisplayName("getPartitionSizeTest")
+  public void getPartitionSizeTest() {
+
+    MetadataQueryParam queryParam = new MetadataQueryParam();
+    queryParam.setDbName("default");
+    queryParam.setTableName("employee");
+    queryParam.setPartitionName("ds=202202");
+
+    Long size = hiveMetaDao.getPartitionSize(queryParam);
+    Assertions.assertTrue(size.longValue() >= 0);
+  }
+
+  @Test
+  @DisplayName("getPartitionsTest")
+  public void getPartitionsTest() {
+
+    MetadataQueryParam queryParam = new MetadataQueryParam();
+    queryParam.setDbName("default");
+    queryParam.setTableName("employee");
+
+    List<String> partitions = hiveMetaDao.getPartitions(queryParam);
+    Assertions.assertTrue(partitions.size() >= 0);
+  }
+
+  @Test
+  @DisplayName("getColumnsTest")
+  public void getColumnsTest() {
+
+    MetadataQueryParam queryParam = new MetadataQueryParam();
+    queryParam.setDbName("default");
+    queryParam.setTableName("employee");
+
+    List<Map<String, Object>> columns = hiveMetaDao.getColumns(queryParam);
+    Assertions.assertTrue(columns.size() >= 0);
+  }
+
+  @Test
+  @DisplayName("getStorageDescriptionIDByDbTableNameAndUserTest")
+  public void getStorageDescriptionIDByDbTableNameAndUserTest() {
+    MetadataQueryParam queryParam = new MetadataQueryParam();
+    queryParam.setDbName("default");
+    queryParam.setTableName("employee");
+    queryParam.setUserName("admin");
+    Map<String, Object> list = hiveMetaDao.getStorageDescriptionIDByDbTableNameAndUser(queryParam);
+    Assertions.assertNull(list);
+  }
+
+  @Test
+  @DisplayName("getColumnsByStorageDescriptionIDTest")
+  public void getColumnsByStorageDescriptionIDTest() {
+
+    MetadataQueryParam queryParam = new MetadataQueryParam();
+    queryParam.setSdId("1");
+    List<Map<String, Object>> columns = hiveMetaDao.getColumnsByStorageDescriptionID(queryParam);
+    Assertions.assertTrue(columns.size() >= 0);
+  }
+
+  @Test
+  @DisplayName("getPartitionKeysTest")
+  public void getPartitionKeysTest() {
+    MetadataQueryParam queryParam = new MetadataQueryParam();
+    queryParam.setDbName("default");
+    queryParam.setTableName("employee");
+    List<Map<String, Object>> partitionKeys = hiveMetaDao.getPartitionKeys(queryParam);
+    Assertions.assertTrue(partitionKeys.size() > 0);
+  }
+}
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/type/LifecycleTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/type/LifecycleTest.java
new file mode 100644
index 00000000000..d734d2452a7
--- /dev/null
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/type/LifecycleTest.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.linkis.metadata.type; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class LifecycleTest { + + @Test + @DisplayName("enumTest") + public void enumTest() { + + String permanentName = Lifecycle.Permanent.getName(); + String halfYearName = Lifecycle.HalfYear.getName(); + String thisMonthName = Lifecycle.ThisMonth.getName(); + String thisWeekName = Lifecycle.ThisWeek.getName(); + String toddayName = Lifecycle.Todday.getName(); + + Assertions.assertNotNull(permanentName); + Assertions.assertNotNull(halfYearName); + Assertions.assertNotNull(thisMonthName); + Assertions.assertNotNull(thisWeekName); + Assertions.assertNotNull(toddayName); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/type/ModelLevelTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/type/ModelLevelTest.java new file mode 100644 index 00000000000..4d26d8949f7 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/type/ModelLevelTest.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.metadata.type; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class ModelLevelTest { + + @Test + @DisplayName("enumTest") + public void enumTest() { + + String odsName = ModelLevel.ODS.getName(); + String dwdName = ModelLevel.DWD.getName(); + String dwsName = ModelLevel.DWS.getName(); + String adsName = ModelLevel.ADS.getName(); + + Assertions.assertEquals("原始数据层", odsName); + Assertions.assertEquals("明细数据层", dwdName); + Assertions.assertEquals("汇总数据层", dwsName); + Assertions.assertEquals("应用数据层", adsName); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/type/UseWayTypeTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/type/UseWayTypeTest.java new file mode 100644 index 00000000000..1db6c97a71a --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/type/UseWayTypeTest.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.metadata.type; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class UseWayTypeTest { + + @Test + @DisplayName("enumTest") + public void enumTest() { + + String onceWriteMultiReadName = UseWayType.OnceWriteMultiRead.getName(); + String multiOverwriteName = UseWayType.MultiOverwrite.getName(); + String crudName = UseWayType.CRUD.getName(); + String occasionllyReadName = UseWayType.OnceWriteOccasionllyRead.getName(); + + Assertions.assertEquals("一次写入多次读", onceWriteMultiReadName); + Assertions.assertEquals("多次覆盖写", multiOverwriteName); + Assertions.assertEquals("增删改查", crudName); + Assertions.assertEquals("一次写偶尔读", occasionllyReadName); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/util/ConstantsTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/util/ConstantsTest.java new file mode 100644 index 00000000000..642f6bbd397 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/util/ConstantsTest.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.metadata.util; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class ConstantsTest { + + @Test + @DisplayName("constTest") + public void constTest() { + + String applicationName = Constants.APPLICATION_NAME; + String submitted = Constants.SUBMITTED; + String approved = Constants.APPROVED; + String rejected = Constants.REJECTED; + String column = Constants.COLUMN; + String row = Constants.ROW; + String table = Constants.TABLE; + String script = Constants.SCRIPT; + + Assertions.assertNotNull(applicationName); + Assertions.assertNotNull(submitted); + Assertions.assertNotNull(approved); + Assertions.assertNotNull(rejected); + Assertions.assertNotNull(column); + Assertions.assertNotNull(row); + Assertions.assertNotNull(table); + Assertions.assertNotNull(script); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/util/DWSConfigTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/util/DWSConfigTest.java new file mode 100644 index 00000000000..2d27d320f79 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/util/DWSConfigTest.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.metadata.util; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class DWSConfigTest { + + @Test + @DisplayName("constTest") + public void constTest() { + + String hiveConfDir = DWSConfig.HIVE_CONF_DIR.getValue(); + String metaUrl = DWSConfig.HIVE_META_URL.getValue(); + String metaUser = DWSConfig.HIVE_META_USER.getValue(); + String hiveMetaPassword = DWSConfig.HIVE_META_PASSWORD.getValue(); + + Boolean encodeEnabled = DWSConfig.HIVE_PASS_ENCODE_ENABLED.getValue(); + Boolean hivePermissionWithLOGINUserEnabled = + DWSConfig.HIVE_PERMISSION_WITH_lOGIN_USER_ENABLED.getValue(); + String dbFilterKeywords = DWSConfig.DB_FILTER_KEYWORDS.getValue(); + String hiveDbAdminUser = DWSConfig.HIVE_DB_ADMIN_USER.getValue(); + String hdfsFileSystemRestErrs = DWSConfig.HDFS_FILE_SYSTEM_REST_ERRS; + + Assertions.assertNotNull(hiveConfDir); + Assertions.assertNotNull(metaUrl); + Assertions.assertNotNull(metaUser); + Assertions.assertNotNull(hiveMetaPassword); + + Assertions.assertFalse(encodeEnabled.booleanValue()); + Assertions.assertTrue(hivePermissionWithLOGINUserEnabled.booleanValue()); + + Assertions.assertNotNull(dbFilterKeywords); + Assertions.assertNotNull(hiveDbAdminUser); + Assertions.assertNotNull(hdfsFileSystemRestErrs); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/util/HiveUtilsTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/util/HiveUtilsTest.java new file mode 100644 index 00000000000..5d61b934687 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/util/HiveUtilsTest.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.metadata.util; + +import org.apache.hadoop.conf.Configuration; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class HiveUtilsTest { + + @Test + @DisplayName("getDefaultConfTest") + public void getDefaultConfTest() { + + Configuration configuration = HiveUtils.getDefaultConf("hadoop"); + Assertions.assertNotNull(configuration); + } + + @Test + @DisplayName("decodeTest") + public void decodeTest() { + + String hadoop = HiveUtils.decode("hadoop"); + Assertions.assertNotNull(hadoop); + } +} diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/resources/application.properties b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/resources/application.properties new file mode 100644 index 00000000000..5b519435c03 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/resources/application.properties @@ -0,0 +1,59 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +#wds.linkis.test.mode=true +wds.linkis.server.version=v1 + +#test +wds.linkis.test.mode=true +wds.linkis.test.user=hadoop + + +##Linkis governance station administrators +wds.linkis.governance.station.admin=hadoop +wds.linkis.gateway.conf.publicservice.list=query,jobhistory,application,configuration,filesystem,udf,variable,microservice,errorcode,bml,datasource +# + +#logging.level.root=debug +#logging.file=./test.log +#debug=true + +spring.datasource.driver-class-name=org.h2.Driver +spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript from 'classpath:create.sql' +spring.datasource.username=sa +spring.datasource.password= +spring.datasource.hikari.connection-test-query=select 1 +spring.datasource.hikari.minimum-idle=5 +spring.datasource.hikari.auto-commit=true +spring.datasource.hikari.validation-timeout=3000 +spring.datasource.hikari.pool-name=linkis-test +spring.datasource.hikari.maximum-pool-size=50 +spring.datasource.hikari.connection-timeout=30000 +spring.datasource.hikari.idle-timeout=600000 +spring.datasource.hikari.leak-detection-threshold=0 +spring.datasource.hikari.initialization-fail-timeout=1 + +spring.main.web-application-type=servlet +server.port=1234 +spring.h2.console.enabled=true + +#disable eureka discovery client +spring.cloud.service-registry.auto-registration.enabled=false +eureka.client.enabled=false +eureka.client.serviceUrl.registerWithEureka=false + +mybatis-plus.mapper-locations=classpath:org/apache/linkis/metadata/dao/impl/MdqDao.xml,classpath:org/apache/linkis/metadata/hive/dao/impl/HiveMetaDao.xml +mybatis-plus.type-aliases-package=org.apache.linkis.metadata.domain.mdq.po +mybatis-plus.configuration.log-impl=org.apache.ibatis.logging.stdout.StdOutImpl diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/resources/create.sql b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/resources/create.sql new file mode 100644 index 00000000000..b01112c8b21 --- /dev/null +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/resources/create.sql @@ -0,0 +1,235 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
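Context note: the spring.datasource.url above does double duty: it creates an in-memory, MySQL-mode H2 database and, via the INIT clause, executes create.sql (the schema and fixture rows that follow) on first connection. A standalone illustration of the same mechanism, assuming the h2 driver and create.sql are on the classpath; the class itself is not part of the PR:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Opening the same URL as the test datasource makes H2 run create.sql once,
// so the very first connection already sees the seeded schema and rows.
public class H2InitDemo {
  public static void main(String[] args) throws Exception {
    String url =
        "jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;"
            + "INIT=runscript from 'classpath:create.sql'";
    try (Connection conn = DriverManager.getConnection(url, "sa", "");
        Statement st = conn.createStatement();
        ResultSet rs = st.executeQuery("select count(*) from linkis_ps_datasource_table")) {
      rs.next();
      System.out.println("fixture rows: " + rs.getLong(1)); // create.sql inserts exactly one
    }
  }
}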
+*/ + +SET FOREIGN_KEY_CHECKS=0; +SET REFERENTIAL_INTEGRITY FALSE; + +DROP TABLE IF EXISTS linkis_ps_datasource_table CASCADE; +CREATE TABLE linkis_ps_datasource_table ( + id bigint(255) AUTO_INCREMENT, + database varchar(64) , + name varchar(64) , + alias varchar(64) DEFAULT NULL, + creator varchar(16) , + comment varchar(255) DEFAULT NULL, + create_time datetime , + product_name varchar(64) DEFAULT NULL, + project_name varchar(255) DEFAULT NULL, + usage varchar(128) DEFAULT NULL, + lifecycle int(4) , + use_way int(4) , + is_import tinyint(1) , + model_level int(4) , + is_external_use tinyint(1) , + is_partition_table tinyint(1) , + is_available tinyint(1) , + PRIMARY KEY (id), + UNIQUE KEY database (database,name) +) ; + +DROP TABLE IF EXISTS linkis_ps_datasource_field CASCADE; +CREATE TABLE linkis_ps_datasource_field ( + id bigint(20) AUTO_INCREMENT, + table_id bigint(20) , + name varchar(64) , + alias varchar(64) DEFAULT NULL, + type varchar(64) , + comment varchar(255) DEFAULT NULL, + express varchar(255) DEFAULT NULL, + rule varchar(128) DEFAULT NULL, + is_partition_field tinyint(1) , + is_primary tinyint(1) , + length int(11) DEFAULT NULL, + mode_info varchar(128) DEFAULT NULL, + PRIMARY KEY (id) +) ; + +DROP TABLE IF EXISTS linkis_ps_datasource_import CASCADE; +CREATE TABLE linkis_ps_datasource_import ( + id bigint(20) AUTO_INCREMENT, + table_id bigint(20) , + import_type int(4) , + args varchar(255) , + PRIMARY KEY (id) +) ; + +DROP TABLE IF EXISTS linkis_ps_datasource_lineage CASCADE; +CREATE TABLE linkis_ps_datasource_lineage ( + id bigint(20) AUTO_INCREMENT, + table_id bigint(20) DEFAULT NULL, + source_table varchar(64) DEFAULT NULL, + update_time datetime DEFAULT NULL, + PRIMARY KEY (id) +) ; + + +INSERT INTO linkis_ps_datasource_table (database,name,alias,creator,comment,create_time,product_name,project_name,usage,lifecycle,use_way,is_import,model_level,is_external_use,is_partition_table,is_available) VALUES + ('ods_user_md_ind','t_student_temp','t_student_temp','hadoop','','2022-04-14 18:53:20','','','测试',0,2,0,0,0,1,1); +INSERT INTO linkis_ps_datasource_field (table_id,name,alias,type,comment,express,rule,is_partition_field,is_primary,length) VALUES + (1,'name','','string','',null,null,0,0,255); +INSERT INTO linkis_ps_datasource_import (table_id,import_type,args) VALUES (1,1,'where 1=1'); +INSERT INTO linkis_ps_datasource_lineage (table_id,source_table) VALUES (1,'db_test'); + +DROP TABLE IF EXISTS DBS CASCADE; +CREATE TABLE DBS ( + DB_ID bigint(20) , + DESC varchar(256) DEFAULT NULL, + DB_LOCATION_URI varchar(4000) , + NAME varchar(128) DEFAULT NULL, + OWNER_NAME varchar(128) DEFAULT NULL, + OWNER_TYPE varchar(10) DEFAULT NULL, + CTLG_NAME varchar(256) DEFAULT 'hive', + PRIMARY KEY (DB_ID) +) ; + +DROP TABLE IF EXISTS SDS CASCADE; +CREATE TABLE SDS ( + SD_ID bigint(20) , + CD_ID bigint(20) DEFAULT NULL, + INPUT_FORMAT varchar(4000) DEFAULT NULL, + IS_COMPRESSED bit(1) , + IS_STOREDASSUBDIRECTORIES bit(1) , + LOCATION varchar(4000) DEFAULT NULL, + NUM_BUCKETS int(11) , + OUTPUT_FORMAT varchar(4000) DEFAULT NULL, + SERDE_ID bigint(20) DEFAULT NULL, + PRIMARY KEY (SD_ID) +) ; + +DROP TABLE IF EXISTS TBLS CASCADE; +CREATE TABLE TBLS ( + TBL_ID bigint(20) , + CREATE_TIME int(11) , + DB_ID bigint(20) DEFAULT NULL, + LAST_ACCESS_TIME int(11) , + OWNER varchar(767) DEFAULT NULL, + OWNER_TYPE varchar(10) DEFAULT NULL, + RETENTION int(11) , + SD_ID bigint(20) DEFAULT NULL, + TBL_NAME varchar(256) DEFAULT NULL, + TBL_TYPE varchar(128) DEFAULT NULL, + VIEW_EXPANDED_TEXT mediumtext, + 
VIEW_ORIGINAL_TEXT mediumtext, + IS_REWRITE_ENABLED bit(1) , + PRIMARY KEY (TBL_ID) +) ; + +INSERT INTO DBS (DB_ID,DESC,DB_LOCATION_URI,NAME,OWNER_NAME,OWNER_TYPE,CTLG_NAME) VALUES + (1,'Default Hive database','hdfs://hadoops/user/hive/warehouse','default','public','ROLE','hive'); +INSERT INTO TBLS (TBL_ID,CREATE_TIME,DB_ID,LAST_ACCESS_TIME,OWNER,OWNER_TYPE,RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED) VALUES + (1,1648518600,1,0,'hadoop','USER',0,1,'employee','MANAGED_TABLE',NULL,NULL,0); +INSERT INTO SDS (SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) VALUES + (1,1,'org.apache.hadoop.mapred.TextInputFormat',0,0,'hdfs://hadoops/user/hive/warehouse/hivedemo.db/employee',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',1); + +DROP TABLE IF EXISTS ROLES CASCADE; +CREATE TABLE ROLES ( + ROLE_ID bigint(20) , + CREATE_TIME int(11) , + OWNER_NAME varchar(128) DEFAULT NULL, + ROLE_NAME varchar(128) DEFAULT NULL, + PRIMARY KEY (ROLE_ID) +) ; + +DROP TABLE IF EXISTS ROLE_MAP CASCADE; +CREATE TABLE ROLE_MAP ( + ROLE_GRANT_ID bigint(20) , + ADD_TIME int(11) , + GRANT_OPTION smallint(6) , + GRANTOR varchar(128) DEFAULT NULL, + GRANTOR_TYPE varchar(128) DEFAULT NULL, + PRINCIPAL_NAME varchar(128) DEFAULT NULL, + PRINCIPAL_TYPE varchar(128) DEFAULT NULL, + ROLE_ID bigint(20) DEFAULT NULL, + PRIMARY KEY (ROLE_GRANT_ID) +) ; + +DROP TABLE IF EXISTS DB_PRIVS CASCADE; +CREATE TABLE DB_PRIVS ( + DB_GRANT_ID bigint(20) , + CREATE_TIME int(11) , + DB_ID bigint(20) DEFAULT NULL, + GRANT_OPTION smallint(6) , + GRANTOR varchar(128) DEFAULT NULL, + GRANTOR_TYPE varchar(128) DEFAULT NULL, + PRINCIPAL_NAME varchar(128) DEFAULT NULL, + PRINCIPAL_TYPE varchar(128) DEFAULT NULL, + DB_PRIV varchar(128) DEFAULT NULL, + AUTHORIZER varchar(128) DEFAULT NULL, + PRIMARY KEY (DB_GRANT_ID) +) ; + +DROP TABLE IF EXISTS TBL_PRIVS CASCADE; +CREATE TABLE TBL_PRIVS ( + TBL_GRANT_ID bigint(20) , + CREATE_TIME int(11) , + GRANT_OPTION smallint(6) , + GRANTOR varchar(128) DEFAULT NULL, + GRANTOR_TYPE varchar(128) DEFAULT NULL, + PRINCIPAL_NAME varchar(128) DEFAULT NULL, + PRINCIPAL_TYPE varchar(128) DEFAULT NULL, + TBL_PRIV varchar(128) DEFAULT NULL, + TBL_ID bigint(20) DEFAULT NULL, + AUTHORIZER varchar(128) DEFAULT NULL, + PRIMARY KEY (TBL_GRANT_ID) +) ; + +DROP TABLE IF EXISTS PARTITION_PARAMS CASCADE; +CREATE TABLE PARTITION_PARAMS ( + PART_ID bigint(20) , + PARAM_KEY varchar(256) , + PARAM_VALUE varchar(4000) DEFAULT NULL, + PRIMARY KEY (PART_ID,PARAM_KEY) +) ; + +DROP TABLE IF EXISTS PARTITIONS CASCADE; +CREATE TABLE PARTITIONS ( + PART_ID bigint(20) , + CREATE_TIME int(11) , + LAST_ACCESS_TIME int(11) , + PART_NAME varchar(767) DEFAULT NULL, + SD_ID bigint(20) DEFAULT NULL, + TBL_ID bigint(20) DEFAULT NULL, + PRIMARY KEY (PART_ID) +) ; + +DROP TABLE IF EXISTS COLUMNS_V2 CASCADE; +CREATE TABLE COLUMNS_V2 ( + CD_ID bigint(20) , + COMMENT varchar(256) DEFAULT NULL, + COLUMN_NAME varchar(767) , + TYPE_NAME mediumtext , + INTEGER_IDX int(11) , + PRIMARY KEY (CD_ID,COLUMN_NAME) +) ; + +DROP TABLE IF EXISTS PARTITION_KEYS CASCADE; +CREATE TABLE PARTITION_KEYS ( + TBL_ID bigint(20) , + PKEY_COMMENT varchar(4000) DEFAULT NULL, + PKEY_NAME varchar(128), + PKEY_TYPE varchar(767) , + INTEGER_IDX int(11) , + PRIMARY KEY (TBL_ID,PKEY_NAME) +) ; + +INSERT INTO ROLES (ROLE_ID,CREATE_TIME,OWNER_NAME,ROLE_NAME) VALUES (2,1647872356,'public','public'); +INSERT INTO PARTITION_PARAMS (PART_ID,PARAM_KEY,PARAM_VALUE) VALUES 
(3,'totalSize',3);
+INSERT INTO PARTITIONS (PART_ID,CREATE_TIME,LAST_ACCESS_TIME,PART_NAME,SD_ID,TBL_ID) VALUES
+    (3,1650266917,0,'ds=202202',1,1);
+INSERT INTO COLUMNS_V2 (CD_ID,COMMENT,COLUMN_NAME,TYPE_NAME,INTEGER_IDX) VALUES (1,'','destination','string',0);
+INSERT INTO PARTITION_KEYS (TBL_ID,PKEY_COMMENT,PKEY_NAME,PKEY_TYPE,INTEGER_IDX) VALUES
+    (1,'','ds','string',0);
\ No newline at end of file
diff --git a/linkis-dist/package/conf/linkis-cg-engineplugin.properties b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/resources/linkis.properties
similarity index 56%
rename from linkis-dist/package/conf/linkis-cg-engineplugin.properties
rename to linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/resources/linkis.properties
index 2919ccf0146..1c575edc5b8 100644
--- a/linkis-dist/package/conf/linkis-cg-engineplugin.properties
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/resources/linkis.properties
@@ -13,16 +13,9 @@
 # limitations under the License.
 #
-#
+#wds.linkis.test.mode=true
+wds.linkis.server.version=v1
+
+#test
 wds.linkis.test.mode=true
-wds.linkis.engineconn.debug.enable=true
-##mybatis
-wds.linkis.server.mybatis.mapperLocations=classpath:org/apache/linkis/engineplugin/server/dao/impl/*.xml
-wds.linkis.server.mybatis.typeAliasesPackage=
-wds.linkis.server.mybatis.BasePackage=org.apache.linkis.engineplugin.server.dao
-wds.linkis.engineConn.plugin.cache.expire-in-seconds=100000
-wds.linkis.engineConn.dist.load.enable=true
-#wds.linkis.engineconn.home=/appcom/Install/LinkisInstall/lib/linkis-engineconn-plugins
-#wds.linkis.engineconn.plugin.loader.store.path=/appcom/Install/LinkisInstall/lib/linkis-engineconn-plugins
-##Spring
-spring.server.port=9103
\ No newline at end of file
+wds.linkis.test.user=hadoop
\ No newline at end of file
diff --git a/linkis-public-enhancements/linkis-datasource/pom.xml b/linkis-public-enhancements/linkis-datasource/pom.xml
index 531c903a8ec..e2db91bb43b 100644
--- a/linkis-public-enhancements/linkis-datasource/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/pom.xml
@@ -34,6 +34,7 @@
     <module>linkis-metadata-query/service/hive</module>
     <module>linkis-metadata-query/service/kafka</module>
     <module>linkis-metadata-query/service/mysql</module>
+    <module>linkis-metadata-query/service/hdfs</module>
     <module>linkis-metadata-query/server</module>
     <module>linkis-datasource-client</module>
diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/pom.xml b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/pom.xml
index 44c24c830be..416d236eda1 100644
--- a/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/pom.xml
+++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/pom.xml
@@ -21,6 +21,7 @@
     <groupId>org.apache.linkis</groupId>
    <artifactId>linkis</artifactId>
     <version>1.3.0</version>
+    <relativePath>../../../pom.xml</relativePath>
   </parent>
   <artifactId>linkis-error-code-client</artifactId>
diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/ClientConfigurationTest.java b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/ClientConfigurationTest.java
new file mode 100644
index 00000000000..7327e62ba24
--- /dev/null
+++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/ClientConfigurationTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.errorcode.client; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class ClientConfigurationTest { + + @Test + @DisplayName("commonConstTest") + public void commonConstTest() { + + String linkisGatewayUrl = ClientConfiguration.LINKIS_GATEWAY_URL.getValue(); + String errorCodeUrlPrefix = ClientConfiguration.ERRORCODE_URL_PREFIX.getValue(); + String errorCodeGetUrl = ClientConfiguration.ERRORCODE_GET_URL.getValue(); + Long defaultConnectTimeOut = ClientConfiguration.DEFAULT_CONNECT_TIME_OUT.getValue(); + Long defaultReadTimeOut = ClientConfiguration.DEFAULT_READ_TIME_OUT.getValue(); + String authTokenValue = ClientConfiguration.AUTH_TOKEN_VALUE.getValue(); + Long futureTimeOut = ClientConfiguration.FUTURE_TIME_OUT.getValue(); + + Assertions.assertNotNull(linkisGatewayUrl); + Assertions.assertNotNull(errorCodeUrlPrefix); + Assertions.assertNotNull(errorCodeGetUrl); + Assertions.assertTrue(defaultConnectTimeOut.longValue() == 600000L); + Assertions.assertTrue(defaultReadTimeOut == 600000L); + Assertions.assertNotNull(authTokenValue); + Assertions.assertTrue(futureTimeOut.longValue() == 2000L); + } +} diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/ErrorCodeClientBuilderTest.java b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/ErrorCodeClientBuilderTest.java new file mode 100644 index 00000000000..fc75ebd48e6 --- /dev/null +++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/ErrorCodeClientBuilderTest.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
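Context note: ClientConfigurationTest above reads every setting through a typed getValue() accessor. In Linkis such settings are conventionally declared as CommonVars; a sketch of that pattern, in which the property keys and the gateway default are illustrative assumptions, with only the 600000L and 2000L defaults mirroring what the test asserts:

import org.apache.linkis.common.conf.CommonVars;

// Hypothetical declarations; key names and the URL default are placeholders,
// not the real ClientConfiguration, which this diff does not include.
public class ClientConfigurationSketch {
  public static final CommonVars<String> LINKIS_GATEWAY_URL =
      CommonVars.apply("wds.linkis.gateway.url", "http://127.0.0.1:9001/");

  public static final CommonVars<Long> DEFAULT_CONNECT_TIME_OUT =
      CommonVars.apply("wds.linkis.errorcode.connect.timeout", 600000L);

  public static final CommonVars<Long> FUTURE_TIME_OUT =
      CommonVars.apply("wds.linkis.errorcode.future.timeout", 2000L);
}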
+ */ + +package org.apache.linkis.errorcode.client; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class ErrorCodeClientBuilderTest { + + @Test + @DisplayName("buildTest") + public void buildTest() { + + LinkisErrorCodeClient linkisErrorCodeClient = new ErrorCodeClientBuilder().build(); + Assertions.assertNotNull(linkisErrorCodeClient); + } + + @Test + @DisplayName("setVersionTest") + public void setVersionTest() { + String version = "v2"; + LinkisErrorCodeClient linkisErrorCodeClient = + new ErrorCodeClientBuilder().setVersion(version).build(); + Assertions.assertNotNull(linkisErrorCodeClient); + } +} diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/LinkisErrorCodeClientTest.java b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/LinkisErrorCodeClientTest.java new file mode 100644 index 00000000000..11896652f48 --- /dev/null +++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/LinkisErrorCodeClientTest.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.errorcode.client;
+
+import org.apache.linkis.errorcode.client.action.ErrorCodeGetAllAction;
+import org.apache.linkis.errorcode.common.LinkisErrorCode;
+import org.apache.linkis.httpclient.dws.DWSHttpClient;
+
+import java.util.List;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
+
+public class LinkisErrorCodeClientTest {
+
+  @Test
+  @DisplayName("getErrorCodesFromServerTest")
+  public void getErrorCodesFromServerTest() {
+
+    // Stub the HTTP client so the test issues no real request
+    DWSHttpClient dwsHttpClient = Mockito.mock(DWSHttpClient.class);
+    Mockito.when(dwsHttpClient.execute(Mockito.any(ErrorCodeGetAllAction.class))).thenReturn(null);
+    LinkisErrorCodeClient linkisErrorCodeClient = new LinkisErrorCodeClient(dwsHttpClient);
+    List<LinkisErrorCode> codes = linkisErrorCodeClient.getErrorCodesFromServer();
+    Assertions.assertTrue(codes.size() == 0);
+  }
+}
diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/handler/ErrorCodeHandlerTest.java b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/handler/ErrorCodeHandlerTest.java
new file mode 100644
index 00000000000..c58a49109e9
--- /dev/null
+++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/handler/ErrorCodeHandlerTest.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.errorcode.client.handler;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+public class ErrorCodeHandlerTest {
+
+  @Test
+  @DisplayName("constTest")
+  public void constTest() {
+    String errorCodePre = ErrorCodeHandler.ERROR_CODE_PRE;
+    String errorCodeOk = ErrorCodeHandler.ERROR_CODE_OK;
+    String errorCodeFailed = ErrorCodeHandler.ERROR_CODE_FAILED;
+
+    Assertions.assertNotNull(errorCodePre);
+    Assertions.assertNotNull(errorCodeOk);
+    Assertions.assertNotNull(errorCodeFailed);
+  }
+}
diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/handler/LinkisErrorCodeHandlerTest.java b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/handler/LinkisErrorCodeHandlerTest.java
new file mode 100644
index 00000000000..cd2b9b9ac05
--- /dev/null
+++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/handler/LinkisErrorCodeHandlerTest.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.errorcode.client.handler;
+
+import org.apache.linkis.errorcode.common.ErrorCode;
+
+import java.util.List;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+public class LinkisErrorCodeHandlerTest {
+
+  @Test
+  @DisplayName("getInstanceTest")
+  public void getInstanceTest() {
+
+    LinkisErrorCodeHandler instance = LinkisErrorCodeHandler.getInstance();
+    Assertions.assertNotNull(instance);
+  }
+
+  @Test
+  @DisplayName("handleTest")
+  public void handleTest() {
+
+    String log =
+        "60001,Session creation failed. The ide queue does not exist. Please check whether the queue settings are correct";
+    LinkisErrorCodeHandler instance = LinkisErrorCodeHandler.getInstance();
+    List<ErrorCode> errorCodes = instance.handle(log);
+    Assertions.assertNotNull(errorCodes);
+  }
+}
diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/manager/LinkisErrorCodeManagerTest.java b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/manager/LinkisErrorCodeManagerTest.java
new file mode 100644
index 00000000000..096699dda2b
--- /dev/null
+++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/manager/LinkisErrorCodeManagerTest.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.errorcode.client.manager;
+
+import org.apache.linkis.errorcode.common.LinkisErrorCode;
+
+import java.util.List;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+public class LinkisErrorCodeManagerTest {
+
+  @Test
+  @DisplayName("getInstanceTest")
+  public void getInstanceTest() {
+
+    LinkisErrorCodeManager instance = LinkisErrorCodeManager.getInstance();
+    Assertions.assertNotNull(instance);
+  }
+
+  @Test
+  @DisplayName("getLinkisErrorCodesTest")
+  public void getLinkisErrorCodesTest() {
+
+    List<LinkisErrorCode> errorCodes = LinkisErrorCodeManager.getInstance().getLinkisErrorCodes();
+    Assertions.assertTrue(errorCodes.size() == 1);
+  }
+}
diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/synchronizer/LinkisErrorCodeSynchronizerTest.java b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/synchronizer/LinkisErrorCodeSynchronizerTest.java
new file mode 100644
index 00000000000..eeaa5971c85
--- /dev/null
+++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/java/org/apache/linkis/errorcode/client/synchronizer/LinkisErrorCodeSynchronizerTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.linkis.errorcode.client.synchronizer; + +import org.apache.linkis.errorcode.common.LinkisErrorCode; + +import java.util.List; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class LinkisErrorCodeSynchronizerTest { + + @Test + @DisplayName("getInstanceTest") + public void getInstanceTest() { + + LinkisErrorCodeSynchronizer instance = LinkisErrorCodeSynchronizer.getInstance(); + Assertions.assertNotNull(instance); + } + + @Test + @DisplayName("synchronizeErrorCodesTest") + public void synchronizeErrorCodesTest() { + + List<LinkisErrorCode> errorCodes = + LinkisErrorCodeSynchronizer.getInstance().synchronizeErrorCodes(); + Assertions.assertTrue(errorCodes.size() == 1); + } +} diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/scala/org/apache/linkis/errorcode/client/action/ErrorCodeActionTest.scala b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/scala/org/apache/linkis/errorcode/client/action/ErrorCodeActionTest.scala new file mode 100644 index 00000000000..331acdc198b --- /dev/null +++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/test/scala/org/apache/linkis/errorcode/client/action/ErrorCodeActionTest.scala @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
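Taken together, the three client test files exercise one lifecycle: the manager owns the cached codes and the synchronizer refreshes them, both reached through getInstance(), with the test fixture pinned to exactly one LinkisErrorCode. A hedged sketch of a caller refreshing its view of the codes (the class name and printed summary are illustrative only):

    import org.apache.linkis.errorcode.client.synchronizer.LinkisErrorCodeSynchronizer;
    import org.apache.linkis.errorcode.common.LinkisErrorCode;
    import java.util.List;

    public class SynchronizeSketch {
      public static void main(String[] args) {
        // synchronizeErrorCodes() returns the error codes currently known to the client.
        List<LinkisErrorCode> codes =
            LinkisErrorCodeSynchronizer.getInstance().synchronizeErrorCodes();
        System.out.println("known error codes: " + codes.size());
      }
    }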
+ */ + +package org.apache.linkis.errorcode.client.action + +import org.junit.jupiter.api.{Assertions, DisplayName, Test} + +class ErrorCodeActionTest { + + @Test + @DisplayName("constTest") + def constTest(): Unit = { + + val errorCodeAction = ErrorCodeGetAllAction() + val url = errorCodeAction.getURL + val user = errorCodeAction.getUser + val urlPrefix = errorCodeAction.urlPrefix + val allUrlSuffix = errorCodeAction.getAllUrlSuffix + + Assertions.assertNotNull(url) + Assertions.assertNotNull(urlPrefix) + Assertions.assertNotNull(allUrlSuffix) + Assertions.assertEquals("hadoop", user) + + } + +} diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/pom.xml b/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/pom.xml index c45e5b2c611..23abf0f6f64 100644 --- a/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/pom.xml +++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/pom.xml @@ -21,6 +21,7 @@ <parent> <groupId>org.apache.linkis</groupId> <artifactId>linkis</artifactId> <version>1.3.0</version> + <relativePath>../../../pom.xml</relativePath> </parent> <artifactId>linkis-error-code-common</artifactId> diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/src/main/scala/org/apache/linkis/errorcode/common/LinkisErrorCode.scala b/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/src/main/scala/org/apache/linkis/errorcode/common/LinkisErrorCode.scala index b9df7ebc8ed..8344eb4f9d8 100644 --- a/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/src/main/scala/org/apache/linkis/errorcode/common/LinkisErrorCode.scala +++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/src/main/scala/org/apache/linkis/errorcode/common/LinkisErrorCode.scala @@ -17,8 +17,6 @@ package org.apache.linkis.errorcode.common -import org.apache.linkis.common.utils.Logging - import scala.util.matching.Regex class LinkisErrorCode extends AbstractErrorCode { diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/src/test/java/org/apache/linkis/errorcode/common/CommonConfTest.java b/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/src/test/java/org/apache/linkis/errorcode/common/CommonConfTest.java new file mode 100644 index 00000000000..f24bfe4a922 --- /dev/null +++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/src/test/java/org/apache/linkis/errorcode/common/CommonConfTest.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.linkis.errorcode.common; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class CommonConfTest { + + @Test + @DisplayName("constTest") + public void constTest() { + + String errorcodeUrl = CommonConf.GET_ERRORCODE_URL; + Assertions.assertEquals("getAllErrorCodes", errorcodeUrl); + } +} diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/src/test/scala/org/apache/linkis/errorcode/common/LinkisErrorCodeTest.scala b/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/src/test/scala/org/apache/linkis/errorcode/common/LinkisErrorCodeTest.scala new file mode 100644 index 00000000000..6e82f0ebccc --- /dev/null +++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-common/src/test/scala/org/apache/linkis/errorcode/common/LinkisErrorCodeTest.scala @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.errorcode.common + +import org.junit.jupiter.api.{Assertions, DisplayName, Test} + +class LinkisErrorCodeTest { + + @Test + @DisplayName("linkisErrorCodeTest") + def linkisErrorCodeTest(): Unit = { + val errorCode = new LinkisErrorCode( + "11000", + "Failed to get datasource info from datasource server(从数据源服务器获取数据源信息失败)" + ) + Assertions.assertEquals("11000", errorCode.getErrorCode) + + } + +} diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-server/pom.xml b/linkis-public-enhancements/linkis-error-code/linkis-error-code-server/pom.xml index f1b75f6087f..7632755c032 100644 --- a/linkis-public-enhancements/linkis-error-code/linkis-error-code-server/pom.xml +++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-server/pom.xml @@ -21,6 +21,7 @@ <parent> <groupId>org.apache.linkis</groupId> <artifactId>linkis</artifactId> <version>1.3.0</version> + <relativePath>../../../pom.xml</relativePath> </parent> <artifactId>linkis-error-code-server</artifactId> diff --git a/linkis-public-enhancements/linkis-error-code/pom.xml b/linkis-public-enhancements/linkis-error-code/pom.xml index ffa6e997ee3..1c8f1e612c4 100644 --- a/linkis-public-enhancements/linkis-error-code/pom.xml +++ b/linkis-public-enhancements/linkis-error-code/pom.xml @@ -21,6 +21,7 @@ <parent> <groupId>org.apache.linkis</groupId> <artifactId>linkis</artifactId> <version>1.3.0</version> + <relativePath>../../pom.xml</relativePath> </parent> <artifactId>linkis-error-code</artifactId> diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-client/pom.xml b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-client/pom.xml index 8a6cff28931..2034460f5dd 100644 --- a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-client/pom.xml +++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-client/pom.xml @@ -21,6 +21,7 @@ <parent> <groupId>org.apache.linkis</groupId> <artifactId>linkis</artifactId> <version>1.3.0</version> + <relativePath>../../../pom.xml</relativePath> </parent> <artifactId>linkis-instance-label-client</artifactId> diff --git
a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/errorcode/LinkisInstanceLabelErrorCodeSummary.java b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/errorcode/LinkisInstanceLabelErrorCodeSummary.java new file mode 100644 index 00000000000..425d539a321 --- /dev/null +++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/errorcode/LinkisInstanceLabelErrorCodeSummary.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.instance.label.errorcode; + +public enum LinkisInstanceLabelErrorCodeSummary { + INSERT_SERVICE_INSTANCE( + 14100, + "Failed to insert service instance(插入服务实例失败)", + "Failed to insert service instance(插入服务实例失败)"), + ONLY_ADMIN_CAN_VIEW( + 14100, + "Only admin can view all instances(只有管理员才能查看所有实例).", + "Only admin can view all instances(只有管理员才能查看所有实例)."), + ONLY_ADMIN_CAN_MODIFY( + 14100, + "Only admin can modify instance label(只有管理员才能修改标签).", + "Only admin can modify instance label(只有管理员才能修改标签)."), + INCLUDE_REPEAT( + 14100, + "Failed to update label, include repeat label(更新label失败,包含重复label)", + "Failed to update label, include repeat label(更新label失败,包含重复label)"), + Express_All(14100, "", ""); + + /** (errorCode)错误码 */ + private int errorCode; + /** (errorDesc)错误描述 */ + private String errorDesc; + /** Possible reasons for the error(错误可能出现的原因) */ + private String comment; + + LinkisInstanceLabelErrorCodeSummary(int errorCode, String errorDesc, String comment) { + this.errorCode = errorCode; + this.errorDesc = errorDesc; + this.comment = comment; + } + + public int getErrorCode() { + return errorCode; + } + + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + @Override + public String toString() { + return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc; + } +} diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/exception/InstanceErrorException.java b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/exception/InstanceErrorException.java index b541822e055..9fe12883bbd 100644 --- 
a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/exception/InstanceErrorException.java +++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/exception/InstanceErrorException.java @@ -19,6 +19,8 @@ import org.apache.linkis.common.exception.ErrorException; +import static org.apache.linkis.instance.label.errorcode.LinkisInstanceLabelErrorCodeSummary.Express_All; + public class InstanceErrorException extends ErrorException { public InstanceErrorException(int errCode, String desc) { @@ -31,6 +33,6 @@ public InstanceErrorException(String desc, Throwable t) { } public InstanceErrorException(String desc) { - super(14100, desc); + super(Express_All.getErrorCode(), desc); } } diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/restful/InstanceRestful.java b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/restful/InstanceRestful.java index 4f1a7cbe189..9c7228a4897 100644 --- a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/restful/InstanceRestful.java +++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/restful/InstanceRestful.java @@ -53,6 +53,8 @@ import java.util.*; +import static org.apache.linkis.instance.label.errorcode.LinkisInstanceLabelErrorCodeSummary.*; + @Api(tags = "instance restful") @RestController @RequestMapping(path = "/microservice") @@ -72,7 +74,7 @@ public Message listAllInstanceWithLabel(HttpServletRequest req) throws Exception if (!Configuration.isAdmin(userName)) { throw new InstanceErrorException( String.format( - "Only admin can view all instances(只有管理员才能查看所有实例). The user [%s] is not admin.", + ONLY_ADMIN_CAN_VIEW.getErrorDesc() + " The user [%s] is not admin.", userName)); } @@ -100,7 +102,7 @@ public Message upDateInstanceLabel(HttpServletRequest req, @RequestBody JsonNode if (!Configuration.isAdmin(userName)) { throw new InstanceErrorException( String.format( - "Only admin can modify instance label(只有管理员才能修改标签). The user [%s] is not admin", + ONLY_ADMIN_CAN_MODIFY.getErrorDesc() + " The user [%s] is not admin", userName)); } String instanceName = jsonNode.get("instance").asText(); @@ -133,7 +135,7 @@ public Message upDateInstanceLabel(HttpServletRequest req, @RequestBody JsonNode } if (labelKeySet.size() != labels.size()) { throw new InstanceErrorException( - "Failed to update label, include repeat label(更新label失败,包含重复label)"); + INCLUDE_REPEAT.getErrorDesc()); } insLabelService.refreshLabelsToInstance(labels, instance); InstanceInfo instanceInfo = insLabelService.getInstanceInfoByServiceInstance(instance); diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java index 7a0b4d8315c..b2bde3849e2 100644 --- a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java +++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java @@ -59,6 +59,7 @@ import org.slf4j.LoggerFactory; import static org.apache.commons.lang3.math.NumberUtils.isCreatable; +import static org.apache.linkis.instance.label.errorcode.LinkisInstanceLabelErrorCodeSummary.INSERT_SERVICE_INSTANCE; @AdapterMode @EnableAspectJAutoProxy(proxyTargetClass = true, exposeProxy = true) @@ -406,7 +407,7 @@ private void doInsertInstance(ServiceInstance serviceInstance) throws InstanceEr try { instanceDao.insertOne(new InstanceInfo(serviceInstance)); } catch (Exception e) { - throw new InstanceErrorException("Failed to insert service instance", e); + throw new InstanceErrorException(INSERT_SERVICE_INSTANCE.getErrorDesc(), e); } } diff --git a/linkis-public-enhancements/linkis-jobhistory/pom.xml b/linkis-public-enhancements/linkis-jobhistory/pom.xml index 51025c406c4..a58901736ac 100644 --- a/linkis-public-enhancements/linkis-jobhistory/pom.xml +++ b/linkis-public-enhancements/linkis-jobhistory/pom.xml @@ -22,6 +22,7 @@ <parent> <groupId>org.apache.linkis</groupId> <artifactId>linkis</artifactId> <version>1.3.0</version> + <relativePath>../../pom.xml</relativePath> </parent> <artifactId>linkis-jobhistory</artifactId> diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/impl/JobHistoryMapper.xml b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/impl/JobHistoryMapper.xml index 7c0ea434fe1..6eb48f122e5 100644 --- a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/impl/JobHistoryMapper.xml +++ b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/impl/JobHistoryMapper.xml @@ -158,7 +158,7 @@ instances = #{instances}, metrics = #{metrics}, engine_type = #{engineType}, - and execution_code = #{executionCode}, + execution_code = #{executionCode}, SELECT - bdt.`status` + bdt.status FROM - `linkis_ps_job_history_group_history` bdt + linkis_ps_job_history_group_history bdt WHERE id = #{jobId} FOR UPDATE diff --git a/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/dao/JobHistoryMapperTest.java b/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/dao/JobHistoryMapperTest.java new file mode 100644 index 00000000000..edf2358649b --- /dev/null +++
b/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/dao/JobHistoryMapperTest.java @@ -0,0 +1,168 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.jobhistory.dao; + +import org.apache.linkis.jobhistory.entity.JobHistory; + +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class JobHistoryMapperTest extends BaseDaoTest { + + private static final Logger LOG = LoggerFactory.getLogger(JobHistoryMapperTest.class); + + @Autowired private JobHistoryMapper jobHistoryMapper; + + private JobHistory createJobHistory() { + JobHistory jobHistory = new JobHistory(); + jobHistory.setJobReqId("LINKISCLI_hadoop_spark_1"); + jobHistory.setSubmitUser("hadoop"); + jobHistory.setExecuteUser("hadoop"); + jobHistory.setSource("{\"scriptPath\":\"LinkisCli\",\"requestIP\":\"127.0.0.1\"}"); + jobHistory.setLabels( + "{\"userCreator\":\"hadoop-LINKISCLI\",\"engineType\":\"spark-3.0.1\",\"codeType\":\"sql\",\"executeOnce\":\"\"}"); + jobHistory.setParams( + "{\"configuration\":{\"startup\":{},\"runtime\":{\"hive.resultset.use.unique.column.names\":true,\"wds.linkis.resultSet.store.path\":\"hdfs:///tmp/linkis/hadoop/linkis/20220714_190204/LINKISCLI/3\",\"source\":{\"scriptPath\":\"LinkisCli\",\"requestIP\":\"127.0.0.1\"},\"job\":{\"resultsetIndex\":0,\"#rt_rs_store_path\":\"hdfs:///tmp/linkis/hadoop/linkis/20220714_190204/LINKISCLI/3\"}}},\"variable\":{}}"); + jobHistory.setProgress("1.0"); + jobHistory.setStatus("Succeed"); + jobHistory.setLogPath("hdfs:///tmp/linkis/log/2022-07-14/LINKISCLI/hadoop/3.log"); + jobHistory.setErrorCode(0); + jobHistory.setCreatedTime(new Date()); + jobHistory.setUpdatedTime(new Date()); + jobHistory.setInstances("127.0.0.1:9104"); + jobHistory.setMetrics( + "{\"scheduleTime\":\"2022-07-14T19:02:05+0800\",\"timeToOrchestrator\":\"2022-07-14T19:02:05+0800\",\"submitTime\":\"2022-07-14T19:02:04+0800\",\"yarnResource\":{\"application_1657595967414_0005\":{\"queueMemory\":1073741824,\"queueCores\":1,\"queueInstances\":0,\"jobStatus\":\"COMPLETED\",\"queue\":\"default\"}},\"completeTime\":\"2022-07-14T19:03:08+0800\"}"); + jobHistory.setEngineType("spark"); + jobHistory.setExecutionCode("show databases;"); + jobHistory.setResultLocation("hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1"); + return jobHistory; + } + + @Test + @DisplayName("selectJobHistoryTest") + public void selectJobHistoryTest() { + JobHistory jobHistory = new JobHistory(); + jobHistory.setId(1L); + List<JobHistory> histories = jobHistoryMapper.selectJobHistory(jobHistory); + Assertions.assertTrue(histories.size() > 0); + } + + @Test + @DisplayName("insertJobHistoryTest") + public void insertJobHistoryTest() { + + JobHistory jobHistory = createJobHistory(); + jobHistoryMapper.insertJobHistory(jobHistory); + List<JobHistory> histories = jobHistoryMapper.selectJobHistory(jobHistory); + Assertions.assertTrue(histories.size() == 1); + } + + @Test + @DisplayName("updateJobHistoryTest") + public void updateJobHistoryTest() { + JobHistory jobHistory = createJobHistory(); + jobHistory.setId(1L); + jobHistoryMapper.updateJobHistory(jobHistory); + List<JobHistory> histories = jobHistoryMapper.selectJobHistory(jobHistory); + Assertions.assertEquals("LINKISCLI_hadoop_spark_1", histories.get(0).getJobReqId()); + } + + @Test + @DisplayName("searchWithIdOrderAscTest") + public void searchWithIdOrderAscTest() { + + List<String> status = new ArrayList<>(); + status.add("Succeed"); + List<JobHistory> histories = + jobHistoryMapper.searchWithIdOrderAsc(1L, "hadoop", status, null, null, "spark"); + Assertions.assertTrue(histories.size() > 0); + } + + @Test + @DisplayName("searchTest") + public void searchTest() { + + List<String> status = new ArrayList<>(); + status.add("Succeed"); + List<JobHistory> histories = + jobHistoryMapper.search(1L, "hadoop", status, null, null, "spark", 1L); + Assertions.assertTrue(histories.size() > 0); + } + + @Test + @DisplayName("searchWithUserCreatorTest") + public void searchWithUserCreatorTest() { + + List<String> status = new ArrayList<>(); + status.add("Succeed"); + List<JobHistory> histories = + jobHistoryMapper.searchWithUserCreator( + 1L, "hadoop", null, null, status, null, null, "spark", 1L); + Assertions.assertTrue(histories.size() > 0); + } + + @Test + @DisplayName("searchWithCreatorOnlyTest") + public void searchWithCreatorOnlyTest() { + + List<String> status = new ArrayList<>(); + status.add("Succeed"); + List<JobHistory> histories = + jobHistoryMapper.searchWithCreatorOnly( + 1L, "hadoop", null, "hadoop", status, null, null, "spark", 1L); + Assertions.assertTrue(histories.size() > 0); + } + + @Test + @DisplayName("countUndoneTaskNoCreatorTest") + public void countUndoneTaskNoCreatorTest() { + List<String> status = new ArrayList<>(); + status.add("Succeed"); + Integer counts = + jobHistoryMapper.countUndoneTaskNoCreator("hadoop", status, null, null, "spark", 1L); + Assertions.assertTrue(counts.intValue() > 0); + } + + @Test + @DisplayName("countUndoneTaskWithUserCreatorTest") + public void countUndoneTaskWithUserCreatorTest() { + List<String> status = new ArrayList<>(); + status.add("Succeed"); + Integer counts = + jobHistoryMapper.countUndoneTaskWithUserCreator( + "hadoop", null, "hadoop", status, null, null, "spark", 1L); + Assertions.assertTrue(counts.intValue() > 0); + } + + @Test + @DisplayName("selectJobHistoryStatusForUpdateTest") + public void selectJobHistoryStatusForUpdateTest() { + + String status = jobHistoryMapper.selectJobHistoryStatusForUpdate(1L); + Assertions.assertEquals("Succeed", status); + } +} diff --git a/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/service/JobHistoryQueryServiceTest.java b/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/service/JobHistoryQueryServiceTest.java new file mode 100644 index 00000000000..810c30e859f --- /dev/null +++ b/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/service/JobHistoryQueryServiceTest.java @@ -0,0 +1,130 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements.
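One behavioural note on the mapper test above: selectJobHistoryStatusForUpdateTest drives the new SELECT ... FOR UPDATE statement, and the row lock it takes only lives as long as the surrounding transaction. Production callers should therefore pair the call with a transaction boundary. A sketch under that assumption; the service class, method name, and the "Scheduled" guard are hypothetical wiring, not part of the patch:

    import org.apache.linkis.jobhistory.dao.JobHistoryMapper;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.stereotype.Service;
    import org.springframework.transaction.annotation.Transactional;

    @Service
    public class StatusTransitionSketch {
      @Autowired private JobHistoryMapper jobHistoryMapper;

      // FOR UPDATE holds the row lock until this transaction commits or rolls back.
      @Transactional
      public boolean canMarkRunning(Long jobId) {
        String status = jobHistoryMapper.selectJobHistoryStatusForUpdate(jobId);
        return "Scheduled".equals(status); // hypothetical guard before a status update
      }
    }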
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.jobhistory.service; + +import org.apache.linkis.governance.common.entity.job.JobRequest; +import org.apache.linkis.governance.common.protocol.job.*; +import org.apache.linkis.jobhistory.dao.JobHistoryMapper; +import org.apache.linkis.jobhistory.entity.JobHistory; +import org.apache.linkis.jobhistory.service.impl.JobHistoryQueryServiceImpl; + +import java.util.*; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class JobHistoryQueryServiceTest { + + @InjectMocks JobHistoryQueryServiceImpl jobRequestQueryService; + + @Mock JobHistoryMapper jobRequestMapper; + + private JobRequest createJobRequest() { + JobRequest jobRequest = new JobRequest(); + jobRequest.setReqId("LINKISCLI_hadoop_spark_1"); + jobRequest.setSubmitUser("hadoop"); + jobRequest.setExecuteUser("hadoop"); + jobRequest.setSource(new HashMap<>()); + jobRequest.setLabels(new ArrayList<>()); + jobRequest.setParams(new HashMap<>()); + jobRequest.setStatus("Succeed"); + jobRequest.setLogPath("hdfs:///tmp/linkis/log/2022-07-14/LINKISCLI/hadoop/3.log"); + jobRequest.setErrorCode(0); + jobRequest.setCreatedTime(new Date()); + jobRequest.setUpdatedTime(new Date()); + jobRequest.setInstances("127.0.0.1:9104"); + jobRequest.setMetrics(new HashMap<>()); + jobRequest.setExecutionCode("show databases;"); + jobRequest.setResultLocation("hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1"); + return jobRequest; + } + + @Test + @DisplayName("addTest") + public void addTest() { + JobReqInsert jobReqInsert = new JobReqInsert(createJobRequest()); + JobRespProtocol jobRespProtocol = jobRequestQueryService.add(jobReqInsert); + Assertions.assertNotNull(jobRespProtocol); + } + + @Test + @DisplayName("changeTest") + public void changeTest() { + JobReqUpdate jobReqUpdate = new JobReqUpdate(createJobRequest()); + JobRespProtocol jobRespProtocol = jobRequestQueryService.change(jobReqUpdate); + Assertions.assertNotNull(jobRespProtocol); + } + + @Test + @DisplayName("batchChangeTest") + public void batchChangeTest() { + + JobReqBatchUpdate jobReqBatchUpdate = + new JobReqBatchUpdate(new ArrayList<>(Arrays.asList(createJobRequest()))); + ArrayList<JobRespProtocol> protocols = jobRequestQueryService.batchChange(jobReqBatchUpdate); + Assertions.assertTrue(protocols.size() > 0); + } + + @Test + @DisplayName("queryTest") + public void queryTest() { + JobReqQuery jobReqQuery = new JobReqQuery(createJobRequest()); + JobRespProtocol jobRespProtocol = jobRequestQueryService.query(jobReqQuery); + Assertions.assertNotNull(jobRespProtocol); + } + + @Test + @DisplayName("getJobHistoryByIdAndNameTest") + public void getJobHistoryByIdAndNameTest() { + JobHistory history = jobRequestQueryService.getJobHistoryByIdAndName(1L, "hadoop"); + Assertions.assertNull(history); + } + + @Test + @DisplayName("searchTest") + public void searchTest() { + List<JobHistory> histories = + jobRequestQueryService.search( + 1L, "hadoop", "hadoop", "Succeed", new Date(), new Date(), "spark", 1L); + Assertions.assertTrue(histories.size() == 0); + } + + @Test + @DisplayName("countUndoneTasksTest") + public void countUndoneTasksTest() { + + Integer counts = + jobRequestQueryService.countUndoneTasks( + "hadoop", "hadoop", new Date(), new Date(), "spark", 1L); + Assertions.assertTrue(counts.intValue() == 0); + } + + @Test + @DisplayName("searchOneTest") + public void searchOneTest() { + + JobHistory jobHistory = jobRequestQueryService.searchOne(1L, new Date(), new Date()); + Assertions.assertNotNull(jobHistory); + } +} diff --git a/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql b/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql index 64a18a04bac..a27c1840636 100644 --- a/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql +++ b/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql @@ -31,4 +31,33 @@ CREATE TABLE IF NOT EXISTS linkis_ps_job_history_detail ( status varchar(32), priority integer, PRIMARY KEY (id) -); \ No newline at end of file +); + +DROP TABLE IF EXISTS linkis_ps_job_history_group_history CASCADE; +CREATE TABLE linkis_ps_job_history_group_history ( + id bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary Key, auto increment', + job_req_id varchar(64) DEFAULT NULL COMMENT 'job execId', + submit_user varchar(50) DEFAULT NULL COMMENT 'who submitted this Job', + execute_user varchar(50) DEFAULT NULL COMMENT 'who actually executed this Job', + source text COMMENT 'job source', + labels text COMMENT 'job labels', + params text COMMENT 'job params', + progress varchar(32) DEFAULT NULL COMMENT 'Job execution progress', + status varchar(50) DEFAULT NULL COMMENT 'Script execution status, must be one of the following: Inited, WaitForRetry, Scheduled, Running, Succeed, Failed, Cancelled, Timeout', + log_path varchar(200) DEFAULT NULL COMMENT 'File path of the job log', + error_code int(11) DEFAULT NULL COMMENT 'Error code. Generated when the execution of the script fails', + error_desc varchar(1000) DEFAULT NULL COMMENT 'Execution description.
Generated when the execution of script fails', + created_time datetime(3) DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Creation time', + updated_time datetime(3) DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Update time', + instances varchar(250) DEFAULT NULL COMMENT 'Entrance instances', + metrics text COMMENT 'Job Metrics', + engine_type varchar(32) DEFAULT NULL COMMENT 'Engine type', + execution_code text COMMENT 'Job origin code or code path', + result_location varchar(500) DEFAULT NULL COMMENT 'File path of the resultsets', + PRIMARY KEY (id), + KEY created_time (created_time), + KEY submit_user (submit_user) +) ; + +INSERT INTO linkis_ps_job_history_group_history (job_req_id,submit_user,execute_user,source,labels,params,progress,status,log_path,error_code,error_desc,created_time,updated_time,instances,metrics,engine_type,execution_code,result_location) VALUES + ('LINKISCLI_hadoop_spark_0','hadoop','hadoop','{"scriptPath":"LinkisCli","requestIP":"127.0.0.1"}','{"userCreator":"hadoop-LINKISCLI","engineType":"spark-3.0.1","codeType":"sql","executeOnce":""}','{"configuration":{"startup":{},"runtime":{"hive.resultset.use.unique.column.names":true,"wds.linkis.resultSet.store.path":"hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1","source":{"scriptPath":"LinkisCli","requestIP":"127.0.0.1"},"job":{"resultsetIndex":0,"#rt_rs_store_path":"hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1"}}},"variable":{}}','1.0','Succeed','hdfs:///tmp/linkis/log/2022-07-14/LINKISCLI/hadoop/1.log',0,'','2022-07-14 18:58:39.019000000','2022-07-14 18:59:51.589000000','127.0.0.1:9104','{"scheduleTime":"2022-07-14T18:58:40+0800","timeToOrchestrator":"2022-07-14T18:58:41+0800","submitTime":"2022-07-14T18:58:39+0800","yarnResource":{"application_1657595967414_0003":{"queueMemory":1073741824,"queueCores":1,"queueInstances":0,"jobStatus":"RUNNING","queue":"default"}},"completeTime":"2022-07-14T18:59:51+0800"}','spark','show databases;','hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1'); diff --git a/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/conf/JobhistoryConfigurationTest.scala b/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/conf/JobhistoryConfigurationTest.scala new file mode 100644 index 00000000000..df65f8015d3 --- /dev/null +++ b/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/conf/JobhistoryConfigurationTest.scala @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
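The create.sql additions are what make the DAO assertions above deterministic: the table is dropped and recreated per run and seeded with exactly one Succeed row, so the size() and status checks cannot flake. A quick way to sanity-check the seed by hand, assuming an in-memory H2 database of the kind the test harness typically wires up (the JDBC URL is an assumption for illustration; BaseDaoTest supplies the real datasource):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;

    public class SeedCheckSketch {
      public static void main(String[] args) throws SQLException {
        // Assumed in-memory H2 URL, for illustration only.
        try (Connection c = DriverManager.getConnection("jdbc:h2:mem:linkis;DB_CLOSE_DELAY=-1");
             Statement s = c.createStatement();
             ResultSet rs = s.executeQuery(
                 "SELECT COUNT(*) FROM linkis_ps_job_history_group_history WHERE status = 'Succeed'")) {
          rs.next();
          System.out.println("seeded rows: " + rs.getInt(1)); // expected: 1
        }
      }
    }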
+ */ + +package org.apache.linkis.jobhistory.conf + +import org.junit.jupiter.api.{Assertions, DisplayName, Test} + +class JobhistoryConfigurationTest { + + @Test + @DisplayName("constTest") + def constTest(): Unit = { + + val governanceStationAdmin = JobhistoryConfiguration.GOVERNANCE_STATION_ADMIN.getValue + val jobHistorySafeTrigger = JobhistoryConfiguration.JOB_HISTORY_SAFE_TRIGGER + val entranceSpringName = JobhistoryConfiguration.ENTRANCE_SPRING_NAME.getValue + val entranceInstanceDelimiter = JobhistoryConfiguration.ENTRANCE_INSTANCE_DELEMITER.getValue + val updateRetryTimes = JobhistoryConfiguration.UPDATE_RETRY_TIMES.getValue + val updateRetryInterval = JobhistoryConfiguration.UPDATE_RETRY_INTERVAL.getValue + val undoneJobMinimum = JobhistoryConfiguration.UNDONE_JOB_MINIMUM_ID.getValue + val undoneJobRefreshTimeDaily = JobhistoryConfiguration.UNDONE_JOB_REFRESH_TIME_DAILY.getValue + + Assertions.assertEquals("hadoop", governanceStationAdmin) + Assertions.assertTrue(jobHistorySafeTrigger.booleanValue()) + Assertions.assertNotNull(entranceSpringName) + Assertions.assertNotNull(entranceInstanceDelimiter) + + Assertions.assertTrue(updateRetryTimes == 3) + Assertions.assertTrue(updateRetryInterval == 3000) + + Assertions.assertTrue(undoneJobMinimum == 0L) + Assertions.assertNotNull(undoneJobRefreshTimeDaily) + } + +} diff --git a/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/util/QueryConfigTest.scala b/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/util/QueryConfigTest.scala new file mode 100644 index 00000000000..980623873d2 --- /dev/null +++ b/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/util/QueryConfigTest.scala @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.linkis.jobhistory.util + +import org.junit.jupiter.api.{Assertions, DisplayName, Test} + +class QueryConfigTest { + + @Test + @DisplayName("constTest") + def constTest(): Unit = { + val cacheMaxExpireHour = QueryConfig.CACHE_MAX_EXPIRE_HOUR.getValue + val cacheDailyExpireEnable = QueryConfig.CACHE_DAILY_EXPIRE_ENABLED.getValue + val cacheMaxSize = QueryConfig.CACHE_MAX_SIZE.getValue + val cacheCleaningIntervalMinute = QueryConfig.CACHE_CLEANING_INTERVAL_MINUTE.getValue + + Assertions.assertTrue(cacheMaxExpireHour == 1L) + Assertions.assertTrue(cacheDailyExpireEnable) + Assertions.assertTrue(cacheMaxSize == 10000L) + Assertions.assertTrue(cacheCleaningIntervalMinute == 30) + } + +} diff --git a/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/util/QueryUtilsTest.scala b/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/util/QueryUtilsTest.scala new file mode 100644 index 00000000000..43b74048271 --- /dev/null +++ b/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/util/QueryUtilsTest.scala @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.jobhistory.util + +import java.util.Date + +import org.junit.jupiter.api.{Assertions, DisplayName, Test} + +class QueryUtilsTest { + + @Test + @DisplayName("isJobHistoryAdminTest") + def isJobHistoryAdminTest(): Unit = { + val defaultName = "hadoop" + val otherName = "hadoops" + val defaultVal = QueryUtils.isJobHistoryAdmin(defaultName) + val otherVal = QueryUtils.isJobHistoryAdmin(otherName) + + Assertions.assertTrue(defaultVal) + Assertions.assertFalse(otherVal) + } + + @Test + @DisplayName("getJobHistoryAdminTest") + def getJobHistoryAdminTest(): Unit = { + val admins = QueryUtils.getJobHistoryAdmin() + Assertions.assertTrue(admins.size == 1) + } + + @Test + @DisplayName("dateToStringTest") + def dateToStringTest(): Unit = { + val dateStr = QueryUtils.dateToString(new Date) + Assertions.assertNotNull(dateStr) + } + + @Test + @DisplayName("checkNameValidTest") + def checkNameValidTest(): Unit = { + val name = "hadoops" + val bool = QueryUtils.checkNameValid(name) + Assertions.assertTrue(bool) + } + +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/pom.xml b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/pom.xml index e73eddb5dc6..0a697056369 100644 --- a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/pom.xml +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/pom.xml @@ -22,6 +22,7 @@ <parent> <groupId>org.apache.linkis</groupId> <artifactId>linkis</artifactId> <version>1.3.0</version> + <relativePath>../../../pom.xml</relativePath> </parent> <artifactId>linkis-storage-script-dev-client</artifactId> diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/WorkspaceClientFactoryTest.scala b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/WorkspaceClientFactoryTest.scala new file mode 100644 index 00000000000..c284a14c333 --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/WorkspaceClientFactoryTest.scala @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.linkis.filesystem + +import org.junit.jupiter.api.{Assertions, DisplayName, Test} + +class WorkspaceClientFactoryTest { + + @Test + @DisplayName("getClientTest") + def getClientTest(): Unit = { + + val user = "hadoop" + val token = "abc" + val gatewayAddress = "127.0.0.1:9001" + + val client = WorkspaceClientFactory.getClient(user, token, gatewayAddress) + Assertions.assertNotNull(client) + } + +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/WorkspaceClientImplTest.scala b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/WorkspaceClientImplTest.scala new file mode 100644 index 00000000000..83408ed7e8e --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/WorkspaceClientImplTest.scala @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.filesystem + +import org.junit.jupiter.api.{Assertions, DisplayName, Test} + +class WorkspaceClientImplTest { + + @Test + @DisplayName("createObject") + def createObject(): Unit = { + + val user = "hadoop" + val token = "abc" + val gatewayAddress = "127.0.0.1:9001" + + val client = new WorkspaceClientImpl(user, token, gatewayAddress) + Assertions.assertNotNull(client) + } + +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/action/OpenScriptFromBMLActionTest.scala b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/action/OpenScriptFromBMLActionTest.scala new file mode 100644 index 00000000000..5b5d6cc3230 --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/action/OpenScriptFromBMLActionTest.scala @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
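Both workspace client tests above build a client from the same three inputs: user, token, and gateway address. A hedged bootstrap sketch in Java (the credentials are the test placeholders; the WorkspaceClient type is assumed to be the interface implemented by WorkspaceClientImpl, so consult that trait for the available request methods):

    import org.apache.linkis.filesystem.WorkspaceClient;
    import org.apache.linkis.filesystem.WorkspaceClientFactory;

    public class ClientBootstrapSketch {
      public static void main(String[] args) {
        // Same arguments the tests use; replace with real credentials in practice.
        WorkspaceClient client = WorkspaceClientFactory.getClient("hadoop", "abc", "127.0.0.1:9001");
        System.out.println("client created: " + (client != null));
      }
    }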
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.filesystem.action + +import org.junit.jupiter.api.{Assertions, DisplayName, Test} + +class OpenScriptFromBMLActionTest { + + @Test + @DisplayName("commonConstTest") + def commonConstTest(): Unit = { + val user = "hadoop" + val action = new OpenScriptFromBMLAction + action.setUser(user) + + Assertions.assertEquals(user, action.getUser) + Assertions.assertNotNull(action.getURL) + } + +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/conf/WorkspaceClientConfTest.scala b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/conf/WorkspaceClientConfTest.scala new file mode 100644 index 00000000000..312c1f27d77 --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/conf/WorkspaceClientConfTest.scala @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.filesystem.conf + +import org.junit.jupiter.api.{Assertions, DisplayName, Test} + +class WorkspaceClientConfTest { + + @Test + @DisplayName("commonConstTest") + def commonConstTest(): Unit = { + + val gateway = WorkspaceClientConf.gateway + val prefix = WorkspaceClientConf.prefix + val scriptFromBML = WorkspaceClientConf.scriptFromBML + val dwsVersion = WorkspaceClientConf.dwsVersion + val tokenKey = WorkspaceClientConf.tokenKey + val tokenValue = WorkspaceClientConf.tokenValue + val scriptFromBMLUrl = WorkspaceClientConf.scriptFromBMLUrl + + Assertions.assertEquals("", gateway) + Assertions.assertNotNull(prefix) + Assertions.assertNotNull(scriptFromBML) + Assertions.assertEquals("v1", dwsVersion) + Assertions.assertNotNull(tokenKey) + Assertions.assertNotNull(tokenValue) + Assertions.assertNotNull(scriptFromBMLUrl) + } + +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/response/ScriptFromBMLResponseTest.scala b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/response/ScriptFromBMLResponseTest.scala new file mode 100644 index 00000000000..c4e9e386cb3 --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-client/src/test/scala/org/apache/linkis/filesystem/response/ScriptFromBMLResponseTest.scala @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.filesystem.response + +import org.junit.jupiter.api.{Assertions, DisplayName, Test} + +class ScriptFromBMLResponseTest { + + @Test + @DisplayName("commonConst") + def commonConst(): Unit = { + val scriptContent = "show databases;" + val response = new ScriptFromBMLResponse( + scriptContent, + new java.util.HashMap[String, java.util.Map[String, Object]] + ) + + Assertions.assertEquals(scriptContent, response.scriptContent) + Assertions.assertTrue(response.metadata.size() == 0) + + } + +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/pom.xml b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/pom.xml index eb06769a071..7f3a69cac38 100644 --- a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/pom.xml +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/pom.xml @@ -22,6 +22,7 @@ <parent> <groupId>org.apache.linkis</groupId> <artifactId>linkis</artifactId> <version>1.3.0</version> + <relativePath>../../../pom.xml</relativePath> </parent> <artifactId>linkis-storage-script-dev-server</artifactId> <packaging>jar</packaging> diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/bml/BMLHelper.scala b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/bml/BMLHelper.scala index c269c2b3586..7c5cd406999 100644 --- a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/bml/BMLHelper.scala +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/bml/BMLHelper.scala @@ -27,8 +27,6 @@ import java.io.{ByteArrayInputStream, InputStream} import java.util import java.util.UUID -import scala.collection.JavaConverters._ - @Component class BMLHelper { @@ -38,9 +36,10 @@ class BMLHelper { val resource: BmlUploadResponse = client.uploadResource(userName, fileName, inputStream) if (!resource.isSuccess) throw WorkspaceExceptionManager.createException(80021) val map = new util.HashMap[String, Object] - map.asScala += "resourceId" -> resource.resourceId - map.asScala += "version" -> resource.version - }.asJava + map.put("resourceId", resource.resourceId) + map.put("version", resource.version) + map + } def upload( userName: String, @@ -53,9 +52,10 @@ class BMLHelper { client.uploadShareResource(userName, projectName, fileName, inputStream) if (!resource.isSuccess) throw WorkspaceExceptionManager.createException(80021) val map = new util.HashMap[String, Object] - map.asScala += "resourceId" -> resource.resourceId - map.asScala += "version" -> resource.version - }.asJava + map.put("resourceId", resource.resourceId) + map.put("version", resource.version) + map + } def upload( userName: String, @@ -66,9 +66,10 @@ val resource:
BmlUploadResponse = client.uploadResource(userName, fileName, inputStream) if (!resource.isSuccess) throw WorkspaceExceptionManager.createException(80021) val map = new util.HashMap[String, Object] - map.asScala += "resourceId" -> resource.resourceId - map.asScala += "version" -> resource.version - }.asJava + map.put("resourceId", resource.resourceId) + map.put("version", resource.version) + map + } def update( userName: String, @@ -80,9 +81,10 @@ class BMLHelper { client.updateShareResource(userName, resourceId, "", inputStream) if (!resource.isSuccess) throw WorkspaceExceptionManager.createException(80022) val map = new util.HashMap[String, Object] - map.asScala += "resourceId" -> resource.resourceId - map.asScala += "version" -> resource.version - }.asJava + map.put("resourceId", resource.resourceId) + map.put("version", resource.version) + map + } def update(userName: String, resourceId: String, content: String): util.Map[String, Object] = { val inputStream = new ByteArrayInputStream(content.getBytes("utf-8")) @@ -95,9 +97,10 @@ class BMLHelper { ) if (!resource.isSuccess) throw WorkspaceExceptionManager.createException(80022) val map = new util.HashMap[String, Object] - map.asScala += "resourceId" -> resource.resourceId - map.asScala += "version" -> resource.version - }.asJava + map.put("resourceId", resource.resourceId) + map.put("version", resource.version) + map + } def query(userName: String, resourceId: String, version: String): util.Map[String, Object] = { val client: BmlClient = createBMLClient(userName) @@ -106,9 +109,10 @@ class BMLHelper { else resource = client.downloadShareResource(userName, resourceId, version) if (!resource.isSuccess) throw WorkspaceExceptionManager.createException(80023) val map = new util.HashMap[String, Object] - map.asScala += "path" -> resource.fullFilePath - map.asScala += "stream" -> resource.inputStream - }.asJava + map.put("path", resource.fullFilePath) + map.put("stream", resource.inputStream) + map + } private def inputstremToString(inputStream: InputStream): String = scala.io.Source.fromInputStream(inputStream).mkString diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/service/FsService.scala b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/service/FsService.scala index 574953ce05e..aeaa990e37f 100644 --- a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/service/FsService.scala +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/service/FsService.scala @@ -40,7 +40,7 @@ class FsService extends Logging { if (FsCache.fsInfo.get(user) != null) { // The outer layer does not add more judgments, it is also ok, it is to lock the user's fs group.(外层不加多个判断也ok,都是要锁用户的fs组) FsCache.fsInfo.get(user) synchronized { - if (FsCache.fsInfo.get(user).filter(_.fs.fsName().equals(fsPath.getFsType)).isEmpty) { + if (!FsCache.fsInfo.get(user).exists(_.fs.fsName().equals(fsPath.getFsType))) { FsCache.fsInfo.get(user) += produceFSInfo(user, fsPath) } else { FsCache.fsInfo @@ -52,14 +52,14 @@ class FsService extends Logging { } else { FsCache.fsInfo synchronized { if (FsCache.fsInfo.get(user) == null) { - FsCache.fsInfo.asScala += user -> ArrayBuffer(produceFSInfo(user, fsPath)) + FsCache.fsInfo.put(user, ArrayBuffer(produceFSInfo(user, fsPath))) } } // (43-49) 
Prevent file and hdfs from entering 37 lines at the same time, causing 51 lines to report the cross mark // (43-49)防止file和hdfs同时进到37行,导致51行报下角标越界 - if (FsCache.fsInfo.get(user).filter(_.fs.fsName().equals(fsPath.getFsType)).isEmpty) { + if (!FsCache.fsInfo.get(user).exists(_.fs.fsName().equals(fsPath.getFsType))) { FsCache.fsInfo.get(user) synchronized { - if (FsCache.fsInfo.get(user).filter(_.fs.fsName().equals(fsPath.getFsType)).isEmpty) { + if (!FsCache.fsInfo.get(user).exists(_.fs.fsName().equals(fsPath.getFsType))) { FsCache.fsInfo.get(user) += produceFSInfo(user, fsPath) } } diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/utils/UserGroupInfo.scala b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/utils/UserGroupInfo.scala index 754fe08f45a..831a96fd2c4 100644 --- a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/utils/UserGroupInfo.scala +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/utils/UserGroupInfo.scala @@ -20,13 +20,14 @@ package org.apache.linkis.filesystem.utils import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.filesystem.conf.WorkSpaceConfiguration +import org.apache.commons.lang3.SystemUtils + import java.util import java.util.concurrent.TimeUnit import java.util.concurrent.locks.ReentrantReadWriteLock import scala.collection.mutable.ArrayBuffer -import com.sun.jna.Platform import oshi.util.{ExecutingCommand, FileUtil} /** @@ -61,7 +62,7 @@ class UserGroupInfo extends Logging { private def refreshUserMap(): Unit = { val tmpUsers = new ArrayBuffer[String]() var passwd: util.List[String] = null - if (Platform.isAIX) passwd = FileUtil.readFile("/etc/passwd") + if (SystemUtils.IS_OS_AIX) passwd = FileUtil.readFile("/etc/passwd") else passwd = ExecutingCommand.runNative("getent passwd") val iterator = passwd.iterator while (iterator.hasNext) { diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/Scan.java b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/Scan.java new file mode 100644 index 00000000000..bbb4e427ece --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/Scan.java @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
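The FsService hunk above swaps filter(...).isEmpty for exists(...) inside what is effectively a double-checked lock on the per-user fs list: one cheap unsynchronized check, then the same check again under the lock before inserting, so two threads racing on file and hdfs paths cannot insert duplicates. A Java rendering of that pattern, with FsInfo and the in-place construction as simplified stand-ins for the Scala originals:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class FsCacheSketch {
      static class FsInfo {
        final String fsName;
        FsInfo(String fsName) { this.fsName = fsName; }
      }

      private final Map<String, List<FsInfo>> fsInfo = new ConcurrentHashMap<>();

      public void ensureFs(String user, String fsType) {
        List<FsInfo> fss = fsInfo.computeIfAbsent(user, u -> new ArrayList<>());
        if (fss.stream().noneMatch(f -> f.fsName.equals(fsType))) { // cheap first check
          synchronized (fss) {
            if (fss.stream().noneMatch(f -> f.fsName.equals(fsType))) { // re-check under the lock
              fss.add(new FsInfo(fsType)); // stand-in for produceFSInfo(user, fsPath)
            }
          }
        }
      }
    }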
+ */ + +package org.apache.linkis.filesystem; + +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; + +@EnableAutoConfiguration +public class Scan {} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/WebApplicationServer.java b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/WebApplicationServer.java new file mode 100644 index 00000000000..17faf096d54 --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/WebApplicationServer.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.filesystem; + +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.boot.web.servlet.ServletComponentScan; +import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; +import org.springframework.context.annotation.ComponentScan; + +@EnableAutoConfiguration +@ServletComponentScan +@ComponentScan +public class WebApplicationServer extends SpringBootServletInitializer { + + public static void main(String[] args) { + new SpringApplicationBuilder(WebApplicationServer.class).run(args); + } +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/conf/WorkSpaceConfigurationTest.java b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/conf/WorkSpaceConfigurationTest.java new file mode 100644 index 00000000000..7eda4af72a3 --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/conf/WorkSpaceConfigurationTest.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.filesystem.conf; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class WorkSpaceConfigurationTest { + + @Test + @DisplayName("staticCommonConst") + public void staticCommonConst() { + + String localUserRootPath = WorkSpaceConfiguration.LOCAL_USER_ROOT_PATH.getValue(); + String hdfsUserRootPathPrefix = WorkSpaceConfiguration.HDFS_USER_ROOT_PATH_PREFIX.getValue(); + String hdfsUserRootPathSuffix = WorkSpaceConfiguration.HDFS_USER_ROOT_PATH_SUFFIX.getValue(); + Boolean resultSetDownloadIsLimit = + WorkSpaceConfiguration.RESULT_SET_DOWNLOAD_IS_LIMIT.getValue(); + Integer resultSetDownloadMaxSizeCsv = + WorkSpaceConfiguration.RESULT_SET_DOWNLOAD_MAX_SIZE_CSV.getValue(); + Integer resultSetDownloadMaxSizeExcel = + WorkSpaceConfiguration.RESULT_SET_DOWNLOAD_MAX_SIZE_EXCEL.getValue(); + Long fileSystemGetTimeOut = WorkSpaceConfiguration.FILESYSTEM_GET_TIMEOUT.getValue(); + Integer fileSystemFsThreadNum = WorkSpaceConfiguration.FILESYSTEM_FS_THREAD_NUM.getValue(); + Integer fileSystemFsThreadCache = WorkSpaceConfiguration.FILESYSTEM_FS_THREAD_CACHE.getValue(); + Boolean filesystemPathCheckTrigger = + WorkSpaceConfiguration.FILESYSTEM_PATH_CHECK_TRIGGER.getValue(); + String filesystemLogAdmin = WorkSpaceConfiguration.FILESYSTEM_LOG_ADMIN.getValue(); + Boolean filesystemPathCheckOwner = + WorkSpaceConfiguration.FILESYSTEM_PATH_CHECK_OWNER.getValue(); + Boolean filesystemPathAutoCreate = + WorkSpaceConfiguration.FILESYSTEM_PATH_AUTO_CREATE.getValue(); + Long localFilesystemUserRefreshInterval = + WorkSpaceConfiguration.LOCAL_FILESYSTEM_USER_REFRESH_INTERVAL.getValue(); + Boolean enableUserGroup = WorkSpaceConfiguration.ENABLE_USER_GROUP.getValue(); + + Assertions.assertNotNull(localUserRootPath); + Assertions.assertNotNull(hdfsUserRootPathPrefix); + Assertions.assertNotNull(hdfsUserRootPathSuffix); + Assertions.assertTrue(resultSetDownloadIsLimit.booleanValue()); + Assertions.assertTrue(resultSetDownloadMaxSizeCsv.intValue() == 5000); + Assertions.assertTrue(resultSetDownloadMaxSizeExcel == 5000); + Assertions.assertTrue(fileSystemGetTimeOut == 10000L); + Assertions.assertTrue(fileSystemFsThreadNum == 10); + Assertions.assertTrue(fileSystemFsThreadCache == 1000); + Assertions.assertFalse(filesystemPathCheckTrigger); + Assertions.assertNotNull(filesystemLogAdmin); + Assertions.assertFalse(filesystemPathCheckOwner.booleanValue()); + Assertions.assertFalse(filesystemPathAutoCreate.booleanValue()); + Assertions.assertTrue(localFilesystemUserRefreshInterval == 1800000L); + Assertions.assertTrue(enableUserGroup.booleanValue()); + } +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/constant/WorkSpaceConstantsTest.java b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/constant/WorkSpaceConstantsTest.java new file mode 100644 index 00000000000..6fe4f8754d1 --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/constant/WorkSpaceConstantsTest.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.filesystem.constant; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class WorkSpaceConstantsTest { + + @Test + @DisplayName("staticCommonConst") + public void staticCommonConst() { + + String xlsxResponseContentType = WorkSpaceConstants.XLSX_RESPONSE_CONTENT_TYPE; + String defaultDateType = WorkSpaceConstants.DEFAULT_DATE_TYPE; + String localReturnType = WorkSpaceConstants.LOCAL_RETURN_TYPE; + String blank = WorkSpaceConstants.BLANK; + + Assertions.assertNotNull(xlsxResponseContentType); + Assertions.assertNotNull(defaultDateType); + Assertions.assertNotNull(localReturnType); + Assertions.assertNotNull(blank); + } +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/exception/WorkSpaceExceptionTest.java b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/exception/WorkSpaceExceptionTest.java new file mode 100644 index 00000000000..e0967e6c4aa --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/exception/WorkSpaceExceptionTest.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.filesystem.exception; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class WorkSpaceExceptionTest { + + @Test + @DisplayName("workSpaceExceptionTest") + public void workSpaceExceptionTest() { + + int errorCode = 80001; + String errorMsg = + "Requesting IO-Engine to initialize fileSystem failed!(请求IO-Engine初始化fileSystem失败!)"; + String ip = "127.0.0.1"; + int port = 8081; + String serviceKind = "ps-service"; + WorkSpaceException workSpaceException = new WorkSpaceException(errorCode, errorMsg); + + Assertions.assertTrue(errorCode == workSpaceException.getErrCode()); + Assertions.assertEquals(errorMsg, workSpaceException.getDesc()); + + WorkSpaceException spaceException = + new WorkSpaceException(errorCode, errorMsg, ip, port, serviceKind); + Assertions.assertEquals(ip, spaceException.getIp()); + Assertions.assertTrue(port == spaceException.getPort()); + Assertions.assertEquals(serviceKind, spaceException.getServiceKind()); + } +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/exception/WorkspaceExceptionManagerTest.java b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/exception/WorkspaceExceptionManagerTest.java new file mode 100644 index 00000000000..3263a68202e --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/exception/WorkspaceExceptionManagerTest.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.filesystem.exception; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class WorkspaceExceptionManagerTest { + + private static final Logger LOG = LoggerFactory.getLogger(WorkspaceExceptionManagerTest.class); + + @Test + @DisplayName("createExceptionTest") + public void createExceptionTest() { + + WorkSpaceException exception = WorkspaceExceptionManager.createException(80021, ""); + Assertions.assertTrue(80021 == exception.getErrCode()); + Assertions.assertNotNull(exception.getDesc()); + + Exception nullPointerException = + Assertions.assertThrows( + NullPointerException.class, + () -> WorkspaceExceptionManager.createException(8002100, "")); + LOG.info("assertThrows pass, the error message: {} ", nullPointerException.getMessage()); + } +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/restful/api/BMLFsRestfulApiTest.java b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/restful/api/BMLFsRestfulApiTest.java new file mode 100644 index 00000000000..d2d5f0fadfa --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/restful/api/BMLFsRestfulApiTest.java @@ -0,0 +1,124 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.filesystem.restful.api; + +import org.apache.linkis.common.utils.JsonUtils; +import org.apache.linkis.filesystem.Scan; +import org.apache.linkis.filesystem.WebApplicationServer; +import org.apache.linkis.filesystem.bml.BMLHelper; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.MessageStatus; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.http.MediaType; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.web.servlet.MockMvc; +import org.springframework.test.web.servlet.MvcResult; + +import java.io.File; +import java.io.FileInputStream; +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +@ExtendWith(SpringExtension.class) +@SpringBootTest(classes = {WebApplicationServer.class, Scan.class}) +@AutoConfigureMockMvc +public class BMLFsRestfulApiTest { + + private static final Logger LOG = LoggerFactory.getLogger(BMLFsRestfulApiTest.class); + + @Autowired private MockMvc mockMvc; + + @InjectMocks private BMLFsRestfulApi bmlFsRestfulApi; + + @MockBean private BMLHelper bmlHelper; + + @Test + @DisplayName("openScriptFromBMLTest") + public void openScriptFromBMLTest() throws Exception { + String querySql = this.getClass().getResource("/").getPath() + "/query.sql"; + Map query = new HashMap<>(); + InputStream is = new FileInputStream(new File(querySql)); + query.put("stream", is); + query.put("name", "hadoop"); + Mockito.when(bmlHelper.query("hadoop", "1", "1")).thenReturn(query); + + MvcResult mvcResult = + mockMvc + .perform( + get("/filesystem/openScriptFromBML") + .param("fileName", querySql) + .param("resourceId", "1") + .param("version", "1")) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Message res = + JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class); + + assertEquals(MessageStatus.SUCCESS(), res.getStatus()); + LOG.info(mvcResult.getResponse().getContentAsString()); + } + + @Test + @DisplayName("openScriptFromProductBMLTest") + public void openScriptFromProductBMLTest() throws Exception { + + String querySql = this.getClass().getResource("/").getPath() + "/query.sql"; + Map query = new HashMap<>(); + InputStream is = new FileInputStream(new File(querySql)); + query.put("stream", is); + query.put("name", "hadoop"); + Mockito.when(bmlHelper.query("hadoop", "1", "1")).thenReturn(query); + + MvcResult mvcResult = + mockMvc + .perform( + get("/filesystem/product/openScriptFromBML") + .param("fileName", querySql) + .param("resourceId", "1") + .param("version", "1")) + .andExpect(status().isOk()) + 
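+ // the JSON body is parsed back into a Message below to assert a success status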
.andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Message res = + JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class); + + assertEquals(MessageStatus.SUCCESS(), res.getStatus()); + LOG.info(mvcResult.getResponse().getContentAsString()); + } +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/restful/api/FsRestfulApiTest.java b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/restful/api/FsRestfulApiTest.java new file mode 100644 index 00000000000..c42466b18cd --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/restful/api/FsRestfulApiTest.java @@ -0,0 +1,227 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.filesystem.restful.api; + +import org.apache.linkis.common.io.FsPath; +import org.apache.linkis.common.utils.JsonUtils; +import org.apache.linkis.filesystem.Scan; +import org.apache.linkis.filesystem.WebApplicationServer; +import org.apache.linkis.filesystem.service.FsService; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.MessageStatus; +import org.apache.linkis.storage.fs.FileSystem; +import org.apache.linkis.storage.fs.impl.LocalFileSystem; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.http.MediaType; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.web.servlet.MockMvc; +import org.springframework.test.web.servlet.MvcResult; + +import java.nio.file.Files; +import java.nio.file.Paths; +import java.nio.file.attribute.PosixFileAttributes; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +@ExtendWith(SpringExtension.class) +@SpringBootTest(classes = {WebApplicationServer.class, Scan.class}) +@AutoConfigureMockMvc +public class 
FsRestfulApiTest { + + private static final Logger LOG = LoggerFactory.getLogger(FsRestfulApiTest.class); + + @InjectMocks private FsRestfulApi fsRestfulApi; + + @Autowired private MockMvc mockMvc; + + @MockBean(name = "fsService") + private FsService fsService; + + @Test + @DisplayName("getDirFileTreesTest") + public void getDirFileTreesTest() throws Exception { + + if (!FsPath.WINDOWS) { + FileSystem fs = new LocalFileSystem(); + fs.setUser("docker"); + String group = + Files.readAttributes( + Paths.get(this.getClass().getResource("/").getPath()), PosixFileAttributes.class) + .group() + .getName(); + fs.setGroup(new FsPath(this.getClass().getResource("/").getPath()), group); + + Mockito.when(fsService.getFileSystem(Mockito.anyString(), Mockito.any(FsPath.class))) + .thenReturn(fs); + String path = this.getClass().getResource("/").getPath(); + + MvcResult mvcResult = + mockMvc + .perform(get("/filesystem/getDirFileTrees").param("path", path)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Message res = + JsonUtils.jackson() + .readValue(mvcResult.getResponse().getContentAsString(), Message.class); + + assertEquals(MessageStatus.SUCCESS(), res.getStatus()); + LOG.info(mvcResult.getResponse().getContentAsString()); + } + } + + @Test + @DisplayName("isExistTest") + public void isExistTest() throws Exception { + + FileSystem fs = new LocalFileSystem(); + fs.setUser("docker"); + Mockito.when(fsService.getFileSystem(Mockito.anyString(), Mockito.any(FsPath.class))) + .thenReturn(fs); + String path = this.getClass().getResource("/").getPath(); + + MvcResult mvcResult = + mockMvc + .perform(get("/filesystem/isExist").param("path", path)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Message res = + JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class); + + assertEquals(MessageStatus.SUCCESS(), res.getStatus()); + LOG.info(mvcResult.getResponse().getContentAsString()); + } + + @Test + @DisplayName("fileInfoTest") + public void fileInfoTest() throws Exception { + if (!FsPath.WINDOWS) { + FileSystem fs = new LocalFileSystem(); + fs.setUser("docker"); + String group = + Files.readAttributes( + Paths.get(this.getClass().getResource("/").getPath()), PosixFileAttributes.class) + .group() + .getName(); + fs.setGroup(new FsPath(this.getClass().getResource("/").getPath()), group); + Mockito.when(fsService.getFileSystem(Mockito.anyString(), Mockito.any(FsPath.class))) + .thenReturn(fs); + String path = this.getClass().getResource("/").getPath() + "query.sql"; + + MvcResult mvcResult = + mockMvc + .perform(get("/filesystem/fileInfo").param("path", path)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Message res = + JsonUtils.jackson() + .readValue(mvcResult.getResponse().getContentAsString(), Message.class); + + assertEquals(MessageStatus.SUCCESS(), res.getStatus()); + LOG.info(mvcResult.getResponse().getContentAsString()); + } + } + + @Test + @DisplayName("openFileTest") + public void openFileTest() throws Exception { + + if (!FsPath.WINDOWS) { + FileSystem fs = new LocalFileSystem(); + fs.setUser("docker"); + String group = + Files.readAttributes( + Paths.get(this.getClass().getResource("/").getPath()), PosixFileAttributes.class) + .group() + .getName(); + fs.setGroup(new FsPath(this.getClass().getResource("/").getPath()), group); + + 
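+ // stub fsService so the controller under test resolves the LocalFileSystem prepared above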
Mockito.when(fsService.getFileSystem(Mockito.anyString(), Mockito.any(FsPath.class))) + .thenReturn(fs); + String path = this.getClass().getResource("/").getPath() + "query.sql"; + + MvcResult mvcResult = + mockMvc + .perform(get("/filesystem/openFile").param("path", path)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Message res = + JsonUtils.jackson() + .readValue(mvcResult.getResponse().getContentAsString(), Message.class); + + assertEquals(MessageStatus.SUCCESS(), res.getStatus()); + LOG.info(mvcResult.getResponse().getContentAsString()); + } + } + + @Test + @DisplayName("openLogTest") + public void openLogTest() throws Exception { + + if (!FsPath.WINDOWS) { + FileSystem fs = new LocalFileSystem(); + fs.setUser("docker"); + String group = + Files.readAttributes( + Paths.get(this.getClass().getResource("/").getPath()), PosixFileAttributes.class) + .group() + .getName(); + fs.setGroup(new FsPath(this.getClass().getResource("/").getPath()), group); + + Mockito.when(fsService.getFileSystem(Mockito.anyString(), Mockito.any(FsPath.class))) + .thenReturn(fs); + String path = this.getClass().getResource("/").getPath() + "info.log"; + + MvcResult mvcResult = + mockMvc + .perform(get("/filesystem/openLog").param("path", path)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Message res = + JsonUtils.jackson() + .readValue(mvcResult.getResponse().getContentAsString(), Message.class); + + assertEquals(MessageStatus.SUCCESS(), res.getStatus()); + LOG.info(mvcResult.getResponse().getContentAsString()); + } + } +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/util/WorkspaceUtilTest.java b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/util/WorkspaceUtilTest.java new file mode 100644 index 00000000000..2920c0bef7f --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/java/org/apache/linkis/filesystem/util/WorkspaceUtilTest.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.linkis.filesystem.util; + +import org.apache.linkis.filesystem.entity.LogLevel; + +import java.util.List; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class WorkspaceUtilTest { + + @Test + @DisplayName("staticCommonConstTest") + public void staticCommonConstTest() { + + String infoReg = WorkspaceUtil.infoReg; + String allReg = WorkspaceUtil.allReg; + String errorReg = WorkspaceUtil.errorReg; + String warnReg = WorkspaceUtil.warnReg; + + Assertions.assertNotNull(infoReg); + Assertions.assertNotNull(allReg); + Assertions.assertNotNull(errorReg); + Assertions.assertNotNull(warnReg); + } + + @Test + @DisplayName("logMatchTest") + public void logMatchTest() { + String code = + "2022-09-18 01:03:35.120 INFO [SpringContextShutdownHook] com.netflix.util.concurrent.ShutdownEnabledTimer 67 cancel - Shutdown hook removed for: NFLoadBalancer-PingTimer-linkis-cg-linkismanager"; + LogLevel logLevel = new LogLevel(LogLevel.Type.INFO); + List logMatch = WorkspaceUtil.logMatch(code, logLevel); + + Assertions.assertTrue(logMatch.size() == 2); + } + + @Test + @DisplayName("suffixTuningTest") + public void suffixTuningTest() { + String path = "/home/hadoop/logs/linkis/apps"; + String tuningPath = WorkspaceUtil.suffixTuning(path); + + Assertions.assertNotNull(tuningPath); + } + + @Test + @DisplayName("isLogAdminTest") + public void isLogAdminTest() { + boolean logAdmin = WorkspaceUtil.isLogAdmin("hadoop"); + boolean admin = WorkspaceUtil.isLogAdmin("hadoops"); + + Assertions.assertTrue(logAdmin); + Assertions.assertFalse(admin); + } +} diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/application.properties b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/application.properties new file mode 100644 index 00000000000..6cbb4ce88a7 --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/application.properties @@ -0,0 +1,63 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +#wds.linkis.test.mode=true +wds.linkis.server.version=v1 + +#test +wds.linkis.test.mode=true +wds.linkis.test.user=hadoop + +wds.linkis.is.gateway=true +wds.linkis.server.web.session.timeout=1h +wds.linkis.gateway.conf.enable.proxy.user=false +wds.linkis.gateway.conf.url.pass.auth=/dss/ +wds.linkis.gateway.conf.enable.token.auth=true +wds.linkis.login_encrypt.enable=false +#logging.level.root=debug +#logging.file=./test.log +#debug=true + +#ng.datasource.driver-class-name=org.h2.Driver +#spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true +#spring.datasource.schema=classpath:create.sql +#spring.datasource.data=classpath:data.sql +##spring.datasource.username=sa +#spring.datasource.password= +#spring.datasource.hikari.connection-test-query=select 1 +##spring.datasource.hikari.minimum-idle=5 +#spring.datasource.hikari.auto-commit=true +#spring.datasource.hikari.validation-timeout=3000 +#spring.datasource.hikari.pool-name=linkis-test +#spring.datasource.hikari.maximum-pool-size=50 +#spring.datasource.hikari.connection-timeout=30000 +#spring.datasource.hikari.idle-timeout=600000 +#spring.datasource.hikari.leak-detection-threshold=0 +#spring.datasource.hikari.initialization-fail-timeout=1 + +#spring.main.web-application-type=servlet +#server.port=1234 +#spring.h2.console.enabled=true + +#disable eureka discovery client +spring.cloud.service-registry.auto-registration.enabled=false +eureka.client.enabled=false +eureka.client.serviceUrl.registerWithEureka=false +linkis.workspace.filesystem.auto.create=true + +#mybatis-plus.mapper-locations=classpath*:org/apache/linkis/udf/dao/impl/*.xml +#mybatis-plus.type-aliases-package=org.apache.linkis.udf.entity +#mybatis-plus.configuration.log-impl=org.apache.ibatis.logging.stdout.StdOutImpl + diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/MetadataService.java b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/deleteFile.sql similarity index 81% rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/MetadataService.java rename to linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/deleteFile.sql index 502ff07938d..95f334f4158 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/common/src/main/java/org/apache/linkis/metadata/query/common/service/MetadataService.java +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/deleteFile.sql @@ -6,15 +6,12 @@ * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- */ - -package org.apache.linkis.metadata.query.common.service; - -public interface MetadataService extends MetadataDbService {} +*/ +show databases; \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/existsSql.sql b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/existsSql.sql new file mode 100644 index 00000000000..95f334f4158 --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/existsSql.sql @@ -0,0 +1,17 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ +show databases; \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/info.log b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/info.log new file mode 100644 index 00000000000..153f737caf8 --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/info.log @@ -0,0 +1,19 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ +2022-09-18 01:03:35.133 INFO [SpringContextShutdownHook] com.netflix.discovery.DiscoveryClient 935 shutdown - Shutting down DiscoveryClient ... +2022-09-18 01:03:38.135 INFO [SpringContextShutdownHook] com.netflix.discovery.DiscoveryClient 971 unregister - Unregistering ... 
+2022-09-18 01:03:38.138 INFO [SpringContextShutdownHook] com.netflix.discovery.DiscoveryClient 973 unregister - DiscoveryClient_LINKIS-CG-ENGINECONN/bd210:linkis-cg-engineconn:43115 - deregister status: 200 diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/linkis.properties b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/linkis.properties new file mode 100644 index 00000000000..56c5f05cf44 --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/linkis.properties @@ -0,0 +1,22 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +#wds.linkis.test.mode=true +wds.linkis.server.version=v1 +wds.linkis.storage.enable.io.proxy=true +#linkis.workspace.filesystem.auto.create=true +#test +wds.linkis.test.mode=true +wds.linkis.test.user=hadoop \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/query.sql b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/query.sql new file mode 100644 index 00000000000..95f334f4158 --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/resources/query.sql @@ -0,0 +1,17 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ +show databases; \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/scala/org/apache/linkis/filesystem/entity/FSInfoTest.scala b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/scala/org/apache/linkis/filesystem/entity/FSInfoTest.scala new file mode 100644 index 00000000000..cb1b6bbcf2b --- /dev/null +++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/test/scala/org/apache/linkis/filesystem/entity/FSInfoTest.scala @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.filesystem.entity + +import org.apache.linkis.storage.fs.impl.LocalFileSystem + +import org.junit.jupiter.api.{Assertions, DisplayName, Test} + +class FSInfoTest { + + @Test + @DisplayName("timeoutTest") + def timeoutTest(): Unit = { + + val info = new FSInfo("1", new LocalFileSystem(), System.currentTimeMillis()) + val timeout = info.timeout + Assertions.assertFalse(timeout) + + } + +} diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-client/pom.xml b/linkis-public-enhancements/linkis-udf/linkis-udf-client/pom.xml index 3cc86e32bd8..9d0984f0a1c 100644 --- a/linkis-public-enhancements/linkis-udf/linkis-udf-client/pom.xml +++ b/linkis-public-enhancements/linkis-udf/linkis-udf-client/pom.xml @@ -22,6 +22,7 @@ org.apache.linkis linkis 1.3.0 + ../../../pom.xml linkis-udf-client jar diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-client/src/test/scala/org/apache/linkis/udf/UDFClientConfigurationTest.scala b/linkis-public-enhancements/linkis-udf/linkis-udf-client/src/test/scala/org/apache/linkis/udf/UDFClientConfigurationTest.scala new file mode 100644 index 00000000000..775ff02b4d7 --- /dev/null +++ b/linkis-public-enhancements/linkis-udf/linkis-udf-client/src/test/scala/org/apache/linkis/udf/UDFClientConfigurationTest.scala @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.udf + +import org.junit.jupiter.api.{Assertions, DisplayName, Test} + +class UDFClientConfigurationTest { + + @Test + @DisplayName("staticCommonConst") + def staticCommonConst(): Unit = { + + val serviceName = "linkis-ps-publicservice" + Assertions.assertEquals(serviceName, UDFClientConfiguration.UDF_SERVICE_NAME.getValue) + + } + +} diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-common/pom.xml b/linkis-public-enhancements/linkis-udf/linkis-udf-common/pom.xml index bee3ae6cfcf..4f16ee8a631 100644 --- a/linkis-public-enhancements/linkis-udf/linkis-udf-common/pom.xml +++ b/linkis-public-enhancements/linkis-udf/linkis-udf-common/pom.xml @@ -22,6 +22,7 @@ org.apache.linkis linkis 1.3.0 + ../../../pom.xml linkis-udf-common jar diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-common/src/test/java/org/apache/linkis/udf/excepiton/UDFExceptionTest.java b/linkis-public-enhancements/linkis-udf/linkis-udf-common/src/test/java/org/apache/linkis/udf/excepiton/UDFExceptionTest.java new file mode 100644 index 00000000000..58d03594eaa --- /dev/null +++ b/linkis-public-enhancements/linkis-udf/linkis-udf-common/src/test/java/org/apache/linkis/udf/excepiton/UDFExceptionTest.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.udf.excepiton; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class UDFExceptionTest { + + @Test + @DisplayName("udfExceptionTest") + public void udfExceptionTest() { + String errorMsg = "udf is not exist"; + UDFException udfException = new UDFException(errorMsg); + Assertions.assertEquals(errorMsg, udfException.getMessage()); + } +} diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-common/src/test/java/org/apache/linkis/udf/utils/ConstantVarTest.java b/linkis-public-enhancements/linkis-udf/linkis-udf-common/src/test/java/org/apache/linkis/udf/utils/ConstantVarTest.java new file mode 100644 index 00000000000..fa325dfc524 --- /dev/null +++ b/linkis-public-enhancements/linkis-udf/linkis-udf-common/src/test/java/org/apache/linkis/udf/utils/ConstantVarTest.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.udf.utils; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class ConstantVarTest { + + @Test + @DisplayName("staticConstTest") + public void staticConstTest() { + + Assertions.assertTrue(ConstantVar.UDF_JAR == 0); + Assertions.assertTrue(ConstantVar.UDF_PY == 1); + Assertions.assertTrue(ConstantVar.UDF_SCALA == 2); + Assertions.assertTrue(ConstantVar.FUNCTION_PY == 3); + Assertions.assertTrue(ConstantVar.FUNCTION_SCALA == 4); + + Assertions.assertEquals("function", ConstantVar.FUNCTION); + Assertions.assertEquals("udf", ConstantVar.UDF); + Assertions.assertEquals("all", ConstantVar.ALL); + + Assertions.assertEquals("sys", ConstantVar.SYS_USER); + Assertions.assertEquals("bdp", ConstantVar.BDP_USER); + Assertions.assertEquals("self", ConstantVar.SELF_USER); + Assertions.assertEquals("share", ConstantVar.SHARE_USER); + Assertions.assertEquals("expire", ConstantVar.EXPIRE_USER); + + Assertions.assertTrue(ConstantVar.specialTypes.length == 4); + } +} diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/pom.xml b/linkis-public-enhancements/linkis-udf/linkis-udf-service/pom.xml index a2cd26c9905..311983e3e44 100644 --- a/linkis-public-enhancements/linkis-udf/linkis-udf-service/pom.xml +++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/pom.xml @@ -22,6 +22,7 @@ org.apache.linkis linkis 1.3.0 + ../../../pom.xml linkis-udf-service jar diff --git a/linkis-public-enhancements/linkis-variable/pom.xml b/linkis-public-enhancements/linkis-variable/pom.xml index c2c2ccf15aa..4e6622f64a9 100644 --- a/linkis-public-enhancements/linkis-variable/pom.xml +++ b/linkis-public-enhancements/linkis-variable/pom.xml @@ -22,6 +22,7 @@ org.apache.linkis linkis 1.3.0 + ../../pom.xml linkis-variable diff --git a/linkis-public-enhancements/linkis-variable/src/main/scala/org/apache/linkis/variable/service/VariableServiceImpl.scala b/linkis-public-enhancements/linkis-variable/src/main/scala/org/apache/linkis/variable/service/VariableServiceImpl.scala index 9fbba744800..7651ab538f9 100644 --- a/linkis-public-enhancements/linkis-variable/src/main/scala/org/apache/linkis/variable/service/VariableServiceImpl.scala +++ b/linkis-public-enhancements/linkis-variable/src/main/scala/org/apache/linkis/variable/service/VariableServiceImpl.scala @@ -110,7 +110,7 @@ class VariableServiceImpl extends VariableService with Logging { for (ele <- userVariables.asScala) { if (f.getKey.equals(ele.getKey)) { flag = false - updateGlobalVariable(f, ele.getValueID: Long) + updateGlobalVariable(f, ele.getValueID) break() } } @@ -126,7 +126,7 @@ class VariableServiceImpl extends VariableService with Logging { break() } } - if (flag) removeGlobalVariable(f.getKeyID: Long) + if (flag) removeGlobalVariable(f.getKeyID) } } diff --git a/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/pom.xml b/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/pom.xml index ab6ed4dff07..24024a3f3e6 100644 --- 
a/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/pom.xml +++ b/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis 1.3.0 + ../../../pom.xml linkis-eureka diff --git a/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/src/main/assembly/distribution.xml b/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/src/main/assembly/distribution.xml index 1fded5f0c56..f722478d422 100644 --- a/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/src/main/assembly/distribution.xml +++ b/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - + + eurekaServer dir diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/pom.xml index dd1f9e7646f..45802a87f00 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/pom.xml +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/pom.xml @@ -19,8 +19,9 @@ 4.0.0 org.apache.linkis - linkis-service-gateway + linkis 1.3.0 + ../../../pom.xml linkis-gateway-authentication diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/java/org/apache/linkis/gateway/authentication/errorcode/LinkisGwAuthenticationErrorCodeSummary.java b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/java/org/apache/linkis/gateway/authentication/errorcode/LinkisGwAuthenticationErrorCodeSummary.java new file mode 100644 index 00000000000..6519d2e2794 --- /dev/null +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/java/org/apache/linkis/gateway/authentication/errorcode/LinkisGwAuthenticationErrorCodeSummary.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.gateway.authentication.errorcode; + +public enum LinkisGwAuthenticationErrorCodeSummary { + TOKEN_IS_NULL(15205, "token is null!(令牌为空!)", "token is null!(令牌为空!)"), + FAILED_TO_LOAD_TOKEN( + 15200, + "Failed to load token from DB into cache!(无法将令牌从数据库加载到缓存中!)", + "Failed to load token from DB into cache!(无法将令牌从数据库加载到缓存中!)"), + TOKEN_VALID_OR_STALE( + 15201, "Token is not valid or stale!(令牌无效或陈旧!)", "Token is not valid or stale!(令牌无效或陈旧!)"), + ILLEGAL_TOKENUSER( + 15202, "Illegal TokenUser for Token!(代币非法用户!)", "Illegal TokenUser for Token!(代币非法用户!)"), + ILLEGAL_HOST(15203, "Illegal Host for Token!(Token非法主机!)", "Illegal Host for Token!(Token非法主机!)"), + INVALID_TOKEN(15204, "Invalid Token(令牌无效)", "Invalid Token(令牌无效)"); + + /** (errorCode)错误码 */ + private int errorCode; + /** (errorDesc)错误描述 */ + private String errorDesc; + /** Possible reasons for the error(错误可能出现的原因) */ + private String comment; + + LinkisGwAuthenticationErrorCodeSummary(int errorCode, String errorDesc, String comment) { + this.errorCode = errorCode; + this.errorDesc = errorDesc; + this.comment = comment; + } + + public int getErrorCode() { + return errorCode; + } + + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + @Override + public String toString() { + return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc; + } +} diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/scala/org/apache/linkis/gateway/authentication/service/CachedTokenService.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/scala/org/apache/linkis/gateway/authentication/service/CachedTokenService.scala index a314845c95e..e839d0b5b52 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/scala/org/apache/linkis/gateway/authentication/service/CachedTokenService.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/scala/org/apache/linkis/gateway/authentication/service/CachedTokenService.scala @@ -17,14 +17,13 @@ package org.apache.linkis.gateway.authentication.service -import org.apache.linkis.common.exception.ErrorException import org.apache.linkis.common.utils.Utils import org.apache.linkis.gateway.authentication.bo.{Token, User} -import org.apache.linkis.gateway.authentication.bo.Token import org.apache.linkis.gateway.authentication.bo.impl.TokenImpl import org.apache.linkis.gateway.authentication.conf.TokenConfiguration import org.apache.linkis.gateway.authentication.dao.TokenDao import org.apache.linkis.gateway.authentication.entity.TokenEntity +import org.apache.linkis.gateway.authentication.errorcode.LinkisGwAuthenticationErrorCodeSummary._ import org.apache.linkis.gateway.authentication.exception.{ TokenAuthException, TokenNotExistException @@ -54,7 +53,7 @@ class CachedTokenService extends TokenService { if (tokenEntity != null) { new TokenImpl().convertFrom(tokenEntity) } else { - throw new TokenNotExistException(15204, s"Invalid Token") + throw new TokenNotExistException(INVALID_TOKEN.getErrorCode, INVALID_TOKEN.getErrorDesc) } } @@ -105,7 +104,7 @@ class CachedTokenService extends TokenService { private 
def loadTokenFromCache(tokenName: String): Token = { if (tokenName == null) { - throw new TokenAuthException(15205, "Token is null!") + throw new TokenAuthException(TOKEN_IS_NULL.getErrorCode, TOKEN_IS_NULL.getErrorDesc) } Utils.tryCatch(tokenCache.get(tokenName))(t => t match { @@ -113,9 +112,16 @@ class CachedTokenService extends TokenService { x.getCause match { case _: TokenNotExistException => null case _ => - throw new TokenAuthException(15200, "Failed to load token from DB into cache!") + throw new TokenAuthException( + FAILED_TO_LOAD_TOKEN.getErrorCode, + FAILED_TO_LOAD_TOKEN.getErrorDesc + ) } - case _ => throw new TokenAuthException(15200, "Failed to load token from DB into cache!") + case _ => + throw new TokenAuthException( + FAILED_TO_LOAD_TOKEN.getErrorCode, + FAILED_TO_LOAD_TOKEN.getErrorDesc + ) } ) } @@ -149,15 +155,18 @@ class CachedTokenService extends TokenService { var ok: Boolean = true if (!isTokenValid(tmpToken)) { ok = false - throw new TokenAuthException(15201, "Token is not valid or stale!") + throw new TokenAuthException( + TOKEN_VALID_OR_STALE.getErrorCode, + TOKEN_VALID_OR_STALE.getErrorDesc + ) } if (!isTokenAcceptableWithUser(tmpToken, userName)) { ok = false - throw new TokenAuthException(15202, "Illegal TokenUser for Token!") + throw new TokenAuthException(ILLEGAL_TOKENUSER.getErrorCode, ILLEGAL_TOKENUSER.getErrorDesc) } if (!isTokenAcceptableWithHost(tmpToken, host)) { ok = false - throw new TokenAuthException(15203, "Illegal Host for Token!") + throw new TokenAuthException(ILLEGAL_HOST.getErrorCode, ILLEGAL_HOST.getErrorDesc) } ok } diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/pom.xml index fbe402ee3be..a2b1cb7c02c 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/pom.xml +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/pom.xml @@ -22,6 +22,7 @@ org.apache.linkis linkis 1.3.0 + ../../../pom.xml linkis-gateway-core diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/parser/GatewayParser.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/parser/GatewayParser.scala index 91debac069c..550388112cc 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/parser/GatewayParser.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/parser/GatewayParser.scala @@ -124,6 +124,11 @@ class DefaultGatewayParser(gatewayParsers: Array[GatewayParser]) extends Abstrac // In order to be compatible with metadata module name refactoring,this logic will be removed in subsequent versions } else if (RPCConfiguration.METADATAQUERY_SERVICE_LIST.contains(serviceId)) { RPCConfiguration.METADATAQUERY_SERVICE_APPLICATION_NAME.getValue + } else if ( + RPCConfiguration.LINKIS_MANAGER_SERVICE_MERGED.getValue && RPCConfiguration.LINKIS_MANAGER_SERVICE_LIST + .contains(serviceId) + ) { + RPCConfiguration.LINKIS_MANAGER_APPLICATION_NAME.getValue } else { serviceId } diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/route/GatewayRouter.scala 
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/route/GatewayRouter.scala index 78123b8362f..0952b08ab50 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/route/GatewayRouter.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/route/GatewayRouter.scala @@ -130,9 +130,12 @@ abstract class AbstractGatewayRouter extends GatewayRouter with Logging { class DefaultGatewayRouter(var gatewayRouters: Array[GatewayRouter]) extends AbstractGatewayRouter { - gatewayRouters = gatewayRouters.sortWith((left, right) => { - left.order() < right.order() - }) + if (gatewayRouters != null && gatewayRouters.nonEmpty) { + val notNullRouters = gatewayRouters.filter(x => x != null) + gatewayRouters = notNullRouters.sortWith((left, right) => { + left.order() < right.order() + }) + } private def findCommonService(parsedServiceId: String) = findService( parsedServiceId, diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/ProxyUserUtils.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/ProxyUserUtils.scala index 16f0ffad84d..4da20e304ce 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/ProxyUserUtils.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/ProxyUserUtils.scala @@ -27,6 +27,8 @@ import java.io.File import java.util.Properties import java.util.concurrent.TimeUnit +import scala.collection.JavaConverters._ + object ProxyUserUtils extends Logging { private val (props, file) = @@ -57,7 +59,7 @@ object ProxyUserUtils extends Logging { val newProps = new Properties val input = FileUtils.openInputStream(file) Utils.tryFinally(newProps.load(input))(IOUtils.closeQuietly(input)) - props.putAll(newProps) + newProps.asScala.foreach { case (k, v) => props.put(k, v) } } def getProxyUser(umUser: String): String = if (ENABLE_PROXY_USER.getValue) { diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/pom.xml index f8c5bc2adf5..979302eefe6 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/pom.xml +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/pom.xml @@ -22,6 +22,7 @@ org.apache.linkis linkis 1.3.0 + ../../../pom.xml linkis-gateway-httpclient-support diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/java/org/apache/linkis/httpclient/errorcode/LinkisGwHttpclientSupportErrorCodeSummary.java b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/java/org/apache/linkis/httpclient/errorcode/LinkisGwHttpclientSupportErrorCodeSummary.java new file mode 100644 index 00000000000..bea45beb550 --- /dev/null +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/java/org/apache/linkis/httpclient/errorcode/LinkisGwHttpclientSupportErrorCodeSummary.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.httpclient.errorcode; + +public enum LinkisGwHttpclientSupportErrorCodeSummary { + AUTHTOKENVALUE_BE_EXISTS( + 10901, + "the value of authTokenValue in ClientConfig must exist, since no password is found to login(ClientConfig中authTokenValue的值必须存在,因为没有找到密码登录.)", + "the value of authTokenValue in ClientConfig must exist, since no password is found to login(ClientConfig中authTokenValue的值必须存在,因为没有找到密码登录.)"), + TOKEN_AUTHENTICATION( + 10901, + "cannot use token authentication, since no user is found to proxy(无法使用令牌身份验证,因为找不到代理用户)", + "cannot use token authentication, since no user is found to proxy(无法使用令牌身份验证,因为找不到代理用户)"), + CLIENTCONFIG_MUST( + 10901, + "ClientConfig must specify the DWS version(ClientConfig必须指定DWS版本)", + "ClientConfig must specify the DWS version(ClientConfig必须指定DWS版本)"); + /** error code(错误码) */ + private int errorCode; + /** error description(错误描述) */ + private String errorDesc; + /** Possible reasons for the error(错误可能出现的原因) */ + private String comment; + + LinkisGwHttpclientSupportErrorCodeSummary(int errorCode, String errorDesc, String comment) { + this.errorCode = errorCode; + this.errorDesc = errorDesc; + this.comment = comment; + } + + public int getErrorCode() { + return errorCode; + } + + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + @Override + public String toString() { + return "errorCode: " + this.errorCode + ", errorDesc: " + this.errorDesc; + } +} diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/DWSHttpClient.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/DWSHttpClient.scala index 93c1de8176c..8d1c87c48fb 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/DWSHttpClient.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/DWSHttpClient.scala @@ -39,7 +39,7 @@ import org.apache.http.{HttpException, HttpResponse} import java.util -import scala.collection.{JavaConversions, JavaConverters} +import scala.collection.JavaConverters._ class DWSHttpClient(clientConfig: DWSClientConfig, clientName: String) extends AbstractHttpClient(clientConfig, clientName) @@ -101,15 +101,11 @@ class DWSHttpClient(clientConfig: DWSClientConfig, clientName: String) transfer(value, map) value case
list: util.List[util.Map[String, Object]] => - val results = JavaConverters - .asScalaBufferConverter(list) - .asScala - .map { map => - val value = clazz.getConstructor().newInstance().asInstanceOf[Result] - transfer(value, map) - value - } - .toArray + val results = list.asScala.map { map => + val value = clazz.getConstructor().newInstance().asInstanceOf[Result] + transfer(value, map) + value + }.toArray new ListResult(responseBody, results) } } diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/authentication/StaticAuthenticationStrategy.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/authentication/StaticAuthenticationStrategy.scala index 80682268e3e..2bd50a1ac86 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/authentication/StaticAuthenticationStrategy.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/authentication/StaticAuthenticationStrategy.scala @@ -27,6 +27,7 @@ import org.apache.linkis.httpclient.authentication.{ import org.apache.linkis.httpclient.dws.exception.AuthenticationFailedException import org.apache.linkis.httpclient.dws.request.DWSAuthenticationAction import org.apache.linkis.httpclient.dws.response.DWSAuthenticationResult +import org.apache.linkis.httpclient.errorcode.LinkisGwHttpclientSupportErrorCodeSummary.AUTHTOKENVALUE_BE_EXISTS import org.apache.linkis.httpclient.request.{Action, UserAction, UserPwdAction} import org.apache.commons.lang3.StringUtils @@ -64,9 +65,7 @@ class StaticAuthenticationStrategy(override protected val sessionMaxAliveTime: L def pwd: String = if (StringUtils.isNotBlank(getClientConfig.getAuthTokenValue)) { getClientConfig.getAuthTokenValue } else { - throw new AuthenticationFailedException( - "the value of authTokenValue in ClientConfig must be exists, since no password is found to login." - ) + throw new AuthenticationFailedException(AUTHTOKENVALUE_BE_EXISTS.getErrorDesc) } requestAction match { @@ -78,9 +77,7 @@ class StaticAuthenticationStrategy(override protected val sessionMaxAliveTime: L action.addRequestPayload("password", pwd) case _ => if (StringUtils.isBlank(getClientConfig.getAuthTokenKey)) { - throw new AuthenticationFailedException( - "the value of authTokenKey in ClientConfig must be exists, since no user is found to login." 
- ) + throw new AuthenticationFailedException(AUTHTOKENVALUE_BE_EXISTS.getErrorDesc) } action.addRequestPayload("userName", getClientConfig.getAuthTokenKey) action.addRequestPayload("password", pwd) diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/authentication/TokenAuthenticationStrategy.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/authentication/TokenAuthenticationStrategy.scala index 899cdbe2acd..e2a0843b796 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/authentication/TokenAuthenticationStrategy.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/authentication/TokenAuthenticationStrategy.scala @@ -19,6 +19,7 @@ package org.apache.linkis.httpclient.dws.authentication import org.apache.linkis.httpclient.authentication._ import org.apache.linkis.httpclient.dws.exception.AuthenticationFailedException +import org.apache.linkis.httpclient.errorcode.LinkisGwHttpclientSupportErrorCodeSummary.TOKEN_AUTHENTICATION import org.apache.linkis.httpclient.request.{Action, UserAction} import org.apache.http.HttpResponse @@ -53,9 +54,7 @@ class TokenAuthenticationStrategy(override protected val sessionMaxAliveTime: Lo override def updateLastAccessTime(): Unit = {} } case _ => - throw new AuthenticationFailedException( - "cannot use token authentication, since no user is found to proxy." - ) + throw new AuthenticationFailedException(TOKEN_AUTHENTICATION.getErrorDesc) } override protected def getAuthenticationAction( diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/exception/AuthenticationFailedException.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/exception/AuthenticationFailedException.scala index 5adaa8121e2..11c4d2455a7 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/exception/AuthenticationFailedException.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/exception/AuthenticationFailedException.scala @@ -18,5 +18,7 @@ package org.apache.linkis.httpclient.dws.exception import org.apache.linkis.common.exception.ErrorException +import org.apache.linkis.httpclient.errorcode.LinkisGwHttpclientSupportErrorCodeSummary.AUTHTOKENVALUE_BE_EXISTS -class AuthenticationFailedException(errorDesc: String) extends ErrorException(10901, errorDesc) +class AuthenticationFailedException(errorDesc: String) + extends ErrorException(AUTHTOKENVALUE_BE_EXISTS.getErrorCode, errorDesc) diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/exception/UnknownVersionException.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/exception/UnknownVersionException.scala index 8df3d694100..5f2db3c42c1 100644 --- 
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/exception/UnknownVersionException.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/exception/UnknownVersionException.scala @@ -18,9 +18,7 @@ package org.apache.linkis.httpclient.dws.exception import org.apache.linkis.common.exception.ErrorException +import org.apache.linkis.httpclient.errorcode.LinkisGwHttpclientSupportErrorCodeSummary.CLIENTCONFIG_MUST class UnknownVersionException - extends ErrorException( - 10901, - "ClientConfig must specify the DWS version(ClientConfig必须指定DWS版本)!" - ) + extends ErrorException(CLIENTCONFIG_MUST.getErrorCode, CLIENTCONFIG_MUST.getErrorDesc) diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml index f094ea3ad60..aba14765a30 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml @@ -60,12 +60,6 @@ - - javax.servlet.jsp - jsp-api - 2.1 - - org.springframework spring-web @@ -100,6 +94,13 @@ jackson-databind + + + com.webank.wedatasphere.dss + dss-gateway-support + 1.1.0 + + diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/assembly/distribution.xml b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/assembly/distribution.xml index 926d9b7e16f..7180fea9cec 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/assembly/distribution.xml +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/assembly/distribution.xml @@ -15,11 +15,9 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> [assembly element markup for this hunk was lost in extraction; only the element text "linkis-gateway" and "dir" survived] diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/route/HaContextGatewayRouter.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/route/HaContextGatewayRouter.scala index b7426eff652..02e2f17f4e1 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/route/HaContextGatewayRouter.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/route/HaContextGatewayRouter.scala @@ -166,7 +166,7 @@ object HaContextGatewayRouter { val CONTEXT_SERVICE_NAME: String = if ( RPCConfiguration.ENABLE_PUBLIC_SERVICE.getValue && RPCConfiguration.PUBLIC_SERVICE_LIST - .exists(_.equalsIgnoreCase(RPCConfiguration.CONTEXT_SERVICE_REQUEST_PREFIX)) + .contains(RPCConfiguration.CONTEXT_SERVICE_REQUEST_PREFIX) ) { RPCConfiguration.PUBLIC_SERVICE_APPLICATION_NAME.getValue } else { diff --git a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/pom.xml index 3c3d26ba1fd..c3a7d9e72cc 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/pom.xml +++ b/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/pom.xml @@ -23,6 +23,7 @@ linkis org.apache.linkis 1.3.0 + ../../../../pom.xml 4.0.0 diff --git a/linkis-web/config.sh b/linkis-web/config.sh index 7a4448bb890..f0b9578dcde 100644 --- a/linkis-web/config.sh +++ b/linkis-web/config.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with diff --git a/linkis-web/install.sh b/linkis-web/install.sh index 4347999e7c4..e2b0eadcbb0 100644 --- a/linkis-web/install.sh +++ b/linkis-web/install.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements.
See the NOTICE file distributed with @@ -89,7 +89,7 @@ linkisConf(){ server_name localhost; #charset koi8-r; #access_log /var/log/nginx/host.access.log main; - + location / { root ${linkis_basepath}/dist; #static directory index index.html index.html; diff --git a/linkis-web/package.json b/linkis-web/package.json index 7b20c4de6e2..3229d70445d 100644 --- a/linkis-web/package.json +++ b/linkis-web/package.json @@ -43,6 +43,7 @@ "reconnecting-websocket": "4.4.0", "sql-formatter": "2.3.3", "svgo": "1.3.0", + "v-jsoneditor": "^1.4.5", "vue": "2.6.12", "vue-i18n": "8.22.1", "vue-router": "3.4.8", diff --git a/linkis-web/src/apps/linkis/i18n/common/en.json b/linkis-web/src/apps/linkis/i18n/common/en.json index 71096cb5b7b..c6ebd0aa9c8 100644 --- a/linkis-web/src/apps/linkis/i18n/common/en.json +++ b/linkis-web/src/apps/linkis/i18n/common/en.json @@ -329,7 +329,9 @@ }, "basedata": { "add": "add", - "remove": "remove" + "remove": "remove", + "edit": "edit", + "addUDFAdmin": "add UDF administrator" } } } diff --git a/linkis-web/src/apps/linkis/i18n/common/zh.json b/linkis-web/src/apps/linkis/i18n/common/zh.json index b14b2d03edf..6d46ba1370d 100644 --- a/linkis-web/src/apps/linkis/i18n/common/zh.json +++ b/linkis-web/src/apps/linkis/i18n/common/zh.json @@ -330,7 +330,9 @@ }, "basedata": { "add": "新增", - "remove": "删除" + "remove": "删除", + "edit": "编辑", + "addUDFAdmin": "新增UDF管理员" }, "EnginePluginManagement": { "engineConnType": "引擎类型", diff --git a/linkis-web/src/apps/linkis/module/datasourceAccess/index.vue b/linkis-web/src/apps/linkis/module/datasourceAccess/index.vue index 1ca327f8703..c311e1e7141 100644 --- a/linkis-web/src/apps/linkis/module/datasourceAccess/index.vue +++ b/linkis-web/src/apps/linkis/module/datasourceAccess/index.vue @@ -34,39 +34,50 @@
[template markup for this hunk was lost in extraction; only bare +/- line markers survived and are omitted here]
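The backend hunks above all make the same refactor: literal error codes and messages at each throw site are replaced by constants from a per-module ErrorCodeSummary enum, so the numeric code and its bilingual description are defined exactly once. A minimal sketch of the resulting call pattern, assuming only the two-argument TokenAuthException(code, desc) constructor and the enum accessors visible in the hunks above (the TokenChecks object and requireTokenName helper are illustrative, not part of the patch):

```scala
import org.apache.linkis.gateway.authentication.errorcode.LinkisGwAuthenticationErrorCodeSummary._
import org.apache.linkis.gateway.authentication.exception.TokenAuthException

object TokenChecks {

  // Each failure path names an enum constant instead of repeating the
  // literal code 15205 and its message at every throw site.
  def requireTokenName(tokenName: String): String = {
    if (tokenName == null) {
      throw new TokenAuthException(TOKEN_IS_NULL.getErrorCode, TOKEN_IS_NULL.getErrorDesc)
    }
    tokenName
  }

}
```

StaticAuthenticationStrategy, TokenAuthenticationStrategy, and UnknownVersionException follow the same shape with the httpclient-support enum.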
diff --git a/linkis-web/src/apps/linkis/module/datasourceEnv/EditForm/index.vue b/linkis-web/src/apps/linkis/module/datasourceEnv/EditForm/index.vue index 1637d1cb357..520a5d4567b 100644 --- a/linkis-web/src/apps/linkis/module/datasourceEnv/EditForm/index.vue +++ b/linkis-web/src/apps/linkis/module/datasourceEnv/EditForm/index.vue @@ -34,6 +34,7 @@ export default { }, data() { return { + keyToName: {}, formModel: {}, formData: {}, options: { @@ -95,32 +96,65 @@ ], }, { - type: 'input', - title: "数据源ID", - field: 'datasourceTypeId', - value: '', - props: { - placeholder: "", - }, + type: "select", + field: "datasourceTypeId", + title: "数据源环境", + value: 1, + options: [], validate: [ { required: true, message: `${this.$t( 'message.linkis.datasource.pleaseInput' - )}"数据源ID"`, - trigger: 'blur', + )}"数据源环境"` }, ], }, { - type: 'input', + type: "radio", + title: "kerberos认证", + field: "keytab", + value: false, + options: [ + {value: false,label: "否",disabled: false}, + {value: true,label: "是",disabled: false}, + ], + on: { + 'on-change': () => { + this.rule[5].hidden = !this.rule[5].hidden; + } + } + }, + { + type: "upload", + field: "pic", + title: "keytab", + value: [], + hidden: true, + props: { + uploadType: 'file', + action: "/api/rest_j/v1/bml/upload", + maxLength: 1, + multiple: false, + onSuccess: (res) => { + let tmpParameter = this.formData.parameter ? JSON.parse(this.formData.parameter) : {}; + tmpParameter.keytab = res.data.resourceId; + this.formData.parameter = JSON.stringify(tmpParameter); + } + }, + }, + { + type: 'v-jsoneditor', title: "参数", field: 'parameter', value: '', props: { - placeholder: "", - "type": "textarea", - "rows": 5 + type: 'form-create', + height: "280px", + options: { + mode: "code", + modes: ['code','tree'], + } }, validate: [ { @@ -131,7 +165,7 @@ trigger: 'blur', }, ], - } + }, ] } }, @@ -141,15 +175,36 @@ methods: { getData(data){ this.formData = {...data} - } + this.formData.parameter = JSON.parse(this.formData.parameter) + }, + changeSelector(options){ + console.log('test', options) + this.rule[3].options = [...options]; + options.forEach(ele=> { + this.keyToName[ele.value] = ele.label; + }) + }, }, watch: { data: { handler(newV) { + this.rule[4].hidden = this.keyToName[newV.datasourceTypeId] == 'hive' ? false : true; + this.rule[5].hidden = !this.formData.keytab; + if(this.rule[4].hidden) this.rule[5].hidden = true; this.getData(newV) }, deep: true, }, + formData: { + handler(newV){ + console.log(this.keyToName) + this.rule[4].hidden = this.keyToName[newV.datasourceTypeId] == 'hive' ? false : true; + if(this.rule[4].hidden) this.rule[5].hidden = true; + else if(this.formData.keytab && newV.datasourceTypeId == 4) this.rule[5].hidden = false; + else this.rule[5].hidden = true; + }, + deep: true + } }, } diff --git a/linkis-web/src/apps/linkis/module/datasourceEnv/index.vue b/linkis-web/src/apps/linkis/module/datasourceEnv/index.vue index 5dc0e35384c..fd0aa9efdf6 100644 --- a/linkis-web/src/apps/linkis/module/datasourceEnv/index.vue +++ b/linkis-web/src/apps/linkis/module/datasourceEnv/index.vue @@ -34,39 +34,36 @@
[template markup for this hunk was lost in extraction; only bare +/- line markers survived]
@@ -75,12 +72,26 @@
[template markup for this hunk was likewise lost in extraction]
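A second thread through the Scala hunks is the move from implicit scala.collection.JavaConversions to explicit scala.collection.JavaConverters with .asScala, including the rewrite of props.putAll(newProps) in ProxyUserUtils. A self-contained sketch of that merge idiom (PropsMerge and mergeProps are illustrative names; a likely motivation, not stated in the patch, is that the putAll overloads on Properties resolve ambiguously under scalac on newer JDKs):

```scala
import java.util.Properties

import scala.collection.JavaConverters._

object PropsMerge {

  // Copy entries one by one through the asScala view of the source
  // Properties rather than calling target.putAll(source) directly.
  def mergeProps(target: Properties, source: Properties): Unit =
    source.asScala.foreach { case (k, v) => target.put(k, v) }

}
```

The DWSHttpClient hunk applies the same style, replacing JavaConverters.asScalaBufferConverter(list).asScala with list.asScala.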