From 13bbbd4d7f099d9c5b1407cd9f36764c4dd62eaa Mon Sep 17 00:00:00 2001
From: HangyuanLiu <460660596@qq.com>
Date: Fri, 15 Sep 2023 14:25:09 +0800
Subject: [PATCH 1/4] Support Ranger StarRocks Plugin
---
.../ranger-servicedef-starrocks.json | 696 ++++++++++++++++++
plugin-starrocks/.gitignore | 1 +
.../conf/ranger-policymgr-ssl.xml | 49 ++
.../conf/ranger-starrocks-audit.xml | 178 +++++
.../conf/ranger-starrocks-security.xml | 74 ++
plugin-starrocks/pom.xml | 131 ++++
.../src/dev-support/findbugsIncludeFile.xml | 25 +
.../src/dev-support/ranger-pmd-ruleset.xml | 121 +++
.../starrocks/RangerServiceStarRocks.java | 124 ++++
.../starrocks/client/StarRocksClient.java | 575 +++++++++++++++
.../client/StarRocksConnectionManager.java | 95 +++
.../client/StarRocksResourceManager.java | 191 +++++
pom.xml | 17 +
13 files changed, 2277 insertions(+)
create mode 100644 agents-common/src/main/resources/service-defs/ranger-servicedef-starrocks.json
create mode 100644 plugin-starrocks/.gitignore
create mode 100644 plugin-starrocks/conf/ranger-policymgr-ssl.xml
create mode 100644 plugin-starrocks/conf/ranger-starrocks-audit.xml
create mode 100644 plugin-starrocks/conf/ranger-starrocks-security.xml
create mode 100644 plugin-starrocks/pom.xml
create mode 100644 plugin-starrocks/src/dev-support/findbugsIncludeFile.xml
create mode 100644 plugin-starrocks/src/dev-support/ranger-pmd-ruleset.xml
create mode 100644 plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/RangerServiceStarRocks.java
create mode 100644 plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksClient.java
create mode 100644 plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksConnectionManager.java
create mode 100644 plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceManager.java
diff --git a/agents-common/src/main/resources/service-defs/ranger-servicedef-starrocks.json b/agents-common/src/main/resources/service-defs/ranger-servicedef-starrocks.json
new file mode 100644
index 0000000000..6ec818d34d
--- /dev/null
+++ b/agents-common/src/main/resources/service-defs/ranger-servicedef-starrocks.json
@@ -0,0 +1,696 @@
+{
+ "name": "starrocks",
+ "displayName": "starrocks",
+ "implClass": "org.apache.ranger.services.starrocks.RangerServiceStarRocks",
+ "label": "StarRocks",
+ "description": "StarRocks",
+ "resources": [
+ {
+ "itemId": 1,
+ "name": "catalog",
+ "type": "string",
+ "level": 10,
+ "parent": "",
+ "mandatory": true,
+ "isValidLeaf": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": true,
+ "ignoreCase": true
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "StarRocks Catalog",
+ "description": "StarRocks Catalog",
+ "accessTypeRestrictions": [
+ "usage",
+ "create database",
+ "drop",
+ "alter"
+ ]
+ },
+ {
+ "itemId": 2,
+ "name": "database",
+ "type": "string",
+ "level": 20,
+ "parent": "catalog",
+ "mandatory": true,
+ "isValidLeaf": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": true,
+ "ignoreCase": true
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "StarRocks Database",
+ "description": "StarRocks Database",
+ "accessTypeRestrictions": [
+ "create table",
+ "drop",
+ "alter",
+ "create view",
+ "create function",
+ "create materialized view"
+ ]
+ },
+ {
+ "itemId": 3,
+ "name": "table",
+ "type": "string",
+ "level": 30,
+ "parent": "database",
+ "mandatory": true,
+ "isValidLeaf": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": true,
+ "ignoreCase": true
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "StarRocks Table",
+ "description": "StarRocks Table",
+ "accessTypeRestrictions": [
+ "delete",
+ "drop",
+ "insert",
+ "select",
+ "alter",
+ "export",
+ "update"
+ ]
+ },
+ {
+ "itemId": 4,
+ "name": "column",
+ "type": "string",
+ "level": 40,
+ "parent": "table",
+ "mandatory": true,
+ "isValidLeaf": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": true,
+ "ignoreCase": true
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "StarRocks Column",
+ "description": "StarRocks Column",
+ "accessTypeRestrictions": [
+ "select"
+ ]
+ },
+ {
+ "itemId": 5,
+ "name": "view",
+ "type": "string",
+ "level": 30,
+ "parent": "database",
+ "mandatory": true,
+ "isValidLeaf": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": true,
+ "ignoreCase": true
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "StarRocks View",
+ "description": "StarRocks View",
+ "accessTypeRestrictions": [
+ "select",
+ "drop",
+ "alter"
+ ]
+ },
+ {
+ "itemId": 6,
+ "name": "materialized_view",
+ "type": "string",
+ "level": 30,
+ "parent": "database",
+ "mandatory": true,
+ "isValidLeaf": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": true,
+ "ignoreCase": true
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "StarRocks Materialized View",
+ "description": "StarRocks Materialized View",
+ "accessTypeRestrictions": [
+ "select",
+ "refresh",
+ "drop",
+ "alter"
+ ]
+ },
+ {
+ "itemId": 7,
+ "name": "function",
+ "type": "string",
+ "level": 30,
+ "parent": "database",
+ "mandatory": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": true,
+ "ignoreCase": true
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "StarRocks Function",
+ "description": "StarRocks Function",
+ "accessTypeRestrictions": [
+ "usage",
+ "drop"
+ ]
+ },
+ {
+ "itemId": 8,
+ "name": "global_function",
+ "type": "string",
+ "level": 10,
+ "parent": "",
+ "mandatory": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": true,
+ "ignoreCase": true
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "StarRocks Global Function",
+ "description": "StarRocks Global Function",
+ "accessTypeRestrictions": [
+ "usage",
+ "drop"
+ ]
+ },
+ {
+ "itemId": 9,
+ "name": "resource",
+ "type": "string",
+ "level": 10,
+ "parent": "",
+ "mandatory": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": true,
+ "ignoreCase": true
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "StarRocks Resource",
+ "description": "StarRocks Resource",
+ "accessTypeRestrictions": [
+ "usage",
+ "alter",
+ "drop"
+ ]
+ },
+ {
+ "itemId": 10,
+ "name": "resource_group",
+ "type": "string",
+ "level": 10,
+ "parent": "",
+ "mandatory": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": true,
+ "ignoreCase": true
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "StarRocks Resource Group",
+ "description": "StarRocks Resource Group",
+ "accessTypeRestrictions": [
+ "alter",
+ "drop"
+ ]
+ },
+ {
+ "itemId": 11,
+ "name": "storage_volume",
+ "type": "string",
+ "level": 10,
+ "parent": "",
+ "mandatory": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": true,
+ "ignoreCase": true
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "StarRocks Storage Volume",
+ "description": "StarRocks Storage Volume",
+ "accessTypeRestrictions": [
+ "drop",
+ "alter",
+ "usage"
+ ]
+ },
+ {
+ "itemId": 12,
+ "name": "user",
+ "type": "string",
+ "level": 10,
+ "parent": "",
+ "mandatory": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": true,
+ "ignoreCase": true
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "StarRocks User",
+ "description": "StarRocks User",
+ "accessTypeRestrictions": [
+ "impersonate"
+ ]
+ },
+ {
+ "itemId": 13,
+ "name": "system",
+ "type": "string",
+ "level": 10,
+ "parent": "",
+ "mandatory": true,
+ "isValidLeaf": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": true,
+ "ignoreCase": true
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "StarRocks System",
+ "description": "StarRocks System",
+ "accessTypeRestrictions": [
+ "grant",
+ "node",
+ "create resource",
+ "plugin",
+ "file",
+ "blacklist",
+ "operate",
+ "create external catalog",
+ "repository",
+ "create resource group",
+ "create global function",
+ "create storage volume"
+ ]
+ }
+ ],
+ "accessTypes": [
+ {
+ "itemId": 1,
+ "name": "grant",
+ "label": "GRANT"
+ },
+ {
+ "itemId": 2,
+ "name": "node",
+ "label": "NODE"
+ },
+ {
+ "itemId": 3,
+ "name": "operate",
+ "label": "OPERATE"
+ },
+ {
+ "itemId": 4,
+ "name": "delete",
+ "label": "DELETE"
+ },
+ {
+ "itemId": 5,
+ "name": "drop",
+ "label": "DROP"
+ },
+ {
+ "itemId": 6,
+ "name": "insert",
+ "label": "INSERT"
+ },
+ {
+ "itemId": 7,
+ "name": "select",
+ "label": "SELECT"
+ },
+ {
+ "itemId": 8,
+ "name": "alter",
+ "label": "ALTER"
+ },
+ {
+ "itemId": 9,
+ "name": "export",
+ "label": "EXPORT"
+ },
+ {
+ "itemId": 10,
+ "name": "update",
+ "label": "UPDATE"
+ },
+ {
+ "itemId": 11,
+ "name": "usage",
+ "label": "USAGE"
+ },
+ {
+ "itemId": 12,
+ "name": "plugin",
+ "label": "PLUGIN"
+ },
+ {
+ "itemId": 13,
+ "name": "file",
+ "label": "FILE"
+ },
+ {
+ "itemId": 14,
+ "name": "blacklist",
+ "label": "BLACKLIST"
+ },
+ {
+ "itemId": 15,
+ "name": "repository",
+ "label": "REPOSITORY"
+ },
+ {
+ "itemId": 16,
+ "name": "refresh",
+ "label": "REFRESH"
+ },
+ {
+ "itemId": 17,
+ "name": "impersonate",
+ "label": "IMPERSONATE"
+ },
+ {
+ "itemId": 18,
+ "name": "create database",
+ "label": "CREATE DATABASE"
+ },
+ {
+ "itemId": 19,
+ "name": "create table",
+ "label": "CREATE TABLE"
+ },
+ {
+ "itemId": 20,
+ "name": "create view",
+ "label": "CREATE VIEW"
+ },
+ {
+ "itemId": 21,
+ "name": "create function",
+ "label": "CREATE FUNCTION"
+ },
+ {
+ "itemId": 22,
+ "name": "create global function",
+ "label": "CREATE GLOBAL FUNCTION"
+ },
+ {
+ "itemId": 23,
+ "name": "create materialized view",
+ "label": "CREATE MATERIALIZED VIEW"
+ },
+ {
+ "itemId": 24,
+ "name": "create resource",
+ "label": "CREATE RESOURCE"
+ },
+ {
+ "itemId": 25,
+ "name": "create resource group",
+ "label": "CREATE RESOURCE GROUP"
+ },
+ {
+ "itemId": 26,
+ "name": "create external catalog",
+ "label": "CREATE EXTERNAL CATALOG"
+ },
+ {
+ "itemId": 27,
+ "name": "create storage volume",
+ "label": "CREATE STORAGE VOLUME"
+ }
+ ],
+ "configs": [
+ {
+ "itemId": 1,
+ "name": "username",
+ "type": "string",
+ "mandatory": true,
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "Username"
+ },
+ {
+ "itemId": 2,
+ "name": "password",
+ "type": "password",
+ "mandatory": false,
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "Password"
+ },
+ {
+ "itemId": 3,
+ "name": "jdbc.driverClassName",
+ "type": "string",
+ "mandatory": true,
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "defaultValue": "com.mysql.cj.jdbc.Driver"
+ },
+ {
+ "itemId": 4,
+ "name": "jdbc.url",
+ "type": "string",
+ "mandatory": true,
+ "defaultValue": "jdbc:mysql://127.0.0.1:9030",
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": ""
+ }
+ ],
+ "enums": [
+ ],
+ "contextEnrichers": [
+ ],
+ "policyConditions": [
+ {
+ "itemId": 100,
+ "name": "ip-range",
+ "evaluator": "org.apache.ranger.plugin.conditionevaluator.RangerIpMatcher",
+ "evaluatorOptions": {
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "IP Address Range",
+ "description": "IP Address Range"
+ }
+ ],
+ "dataMaskDef": {
+ "accessTypes": [
+ {
+ "name": "select"
+ }
+ ],
+ "resources": [
+ {
+ "name": "catalog",
+ "matcherOptions": {
+ "wildCard": "true"
+ },
+ "lookupSupported": true,
+ "uiHint": "{ \"singleValue\":true }"
+ },
+ {
+ "name": "database",
+ "matcherOptions": {
+ "wildCard": "true"
+ },
+ "lookupSupported": true,
+ "uiHint": "{ \"singleValue\":true }"
+ },
+ {
+ "name": "table",
+ "matcherOptions": {
+ "wildCard": "true"
+ },
+ "lookupSupported": true,
+ "uiHint": "{ \"singleValue\":true }"
+ },
+ {
+ "name": "column",
+ "matcherOptions": {
+ "wildCard": "true"
+ },
+ "lookupSupported": true,
+ "uiHint": "{ \"singleValue\":true }"
+ }
+ ],
+ "maskTypes": [
+ {
+ "itemId": 1,
+ "name": "MASK",
+ "label": "Redact",
+ "description": "Replace lowercase with 'x', uppercase with 'X', digits with '0'",
+ "transformer": "cast(regexp_replace(regexp_replace(regexp_replace({col},'([A-Z])', 'X'),'([a-z])','x'),'([0-9])','0') as {type})",
+ "dataMaskOptions": {
+ }
+ },
+ {
+ "itemId": 2,
+ "name": "MASK_SHOW_LAST_4",
+ "label": "Partial mask: show last 4",
+ "description": "Show last 4 characters; replace rest with 'X'",
+ "transformer": "cast(regexp_replace({col}, '(.*)(.{4}$)', x -> regexp_replace(x[1], '.', 'X') || x[2]) as {type})"
+ },
+ {
+ "itemId": 3,
+ "name": "MASK_SHOW_FIRST_4",
+ "label": "Partial mask: show first 4",
+ "description": "Show first 4 characters; replace rest with 'x'",
+ "transformer": "cast(regexp_replace({col}, '(^.{4})(.*)', x -> x[1] || regexp_replace(x[2], '.', 'X')) as {type})"
+ },
+ {
+ "itemId": 4,
+ "name": "MASK_HASH",
+ "label": "Hash",
+ "description": "Hash the value of a varchar with sha256",
+ "transformer": "cast(to_hex(sha256(to_utf8({col}))) as {type})"
+ },
+ {
+ "itemId": 5,
+ "name": "MASK_NULL",
+ "label": "Nullify",
+ "description": "Replace with NULL"
+ },
+ {
+ "itemId": 6,
+ "name": "MASK_NONE",
+ "label": "Unmasked (retain original value)",
+ "description": "No masking"
+ },
+ {
+ "itemId": 12,
+ "name": "MASK_DATE_SHOW_YEAR",
+ "label": "Date: show only year",
+ "description": "Date: show only year",
+ "transformer": "date_trunc('year', {col})"
+ },
+ {
+ "itemId": 13,
+ "name": "CUSTOM",
+ "label": "Custom",
+ "description": "Custom"
+ }
+ ]
+ },
+ "rowFilterDef": {
+ "accessTypes": [
+ {
+ "name": "select"
+ }
+ ],
+ "resources": [
+ {
+ "name": "catalog",
+ "matcherOptions": {
+ "wildCard": "true"
+ },
+ "lookupSupported": true,
+ "mandatory": true,
+ "uiHint": "{ \"singleValue\":true }"
+ },
+ {
+ "name": "database",
+ "matcherOptions": {
+ "wildCard": "true"
+ },
+ "lookupSupported": true,
+ "mandatory": true,
+ "uiHint": "{ \"singleValue\":true }"
+ },
+ {
+ "name": "table",
+ "matcherOptions": {
+ "wildCard": "true"
+ },
+ "lookupSupported": true,
+ "mandatory": true,
+ "uiHint": "{ \"singleValue\":true }"
+ }
+ ]
+ }
+}
diff --git a/plugin-starrocks/.gitignore b/plugin-starrocks/.gitignore
new file mode 100644
index 0000000000..b83d22266a
--- /dev/null
+++ b/plugin-starrocks/.gitignore
@@ -0,0 +1 @@
+/target/
diff --git a/plugin-starrocks/conf/ranger-policymgr-ssl.xml b/plugin-starrocks/conf/ranger-policymgr-ssl.xml
new file mode 100644
index 0000000000..a729d4a0b4
--- /dev/null
+++ b/plugin-starrocks/conf/ranger-policymgr-ssl.xml
@@ -0,0 +1,49 @@
+
+
+
+
+
+
+ xasecure.policymgr.clientssl.keystore
+ starrocksservice-clientcert.jks
+
+ Java Keystore files
+
+
+
+ xasecure.policymgr.clientssl.truststore
+ cacerts-xasecure.jks
+
+ java truststore file
+
+
+
+ xasecure.policymgr.clientssl.keystore.credential.file
+ jceks://file/tmp/keystore-starrocksservice-ssl.jceks
+
+ java keystore credential file
+
+
+
+ xasecure.policymgr.clientssl.truststore.credential.file
+ jceks://file/tmp/truststore-starrocksservice-ssl.jceks
+
+ java truststore credential file
+
+
+
diff --git a/plugin-starrocks/conf/ranger-starrocks-audit.xml b/plugin-starrocks/conf/ranger-starrocks-audit.xml
new file mode 100644
index 0000000000..e6032bb908
--- /dev/null
+++ b/plugin-starrocks/conf/ranger-starrocks-audit.xml
@@ -0,0 +1,178 @@
+
+
+
+
+
+ xasecure.audit.is.enabled
+ true
+
+
+
+
+ xasecure.audit.hdfs.is.enabled
+ false
+
+
+
+ xasecure.audit.hdfs.is.async
+ true
+
+
+
+ xasecure.audit.hdfs.async.max.queue.size
+ 1048576
+
+
+
+ xasecure.audit.hdfs.async.max.flush.interval.ms
+ 30000
+
+
+
+ xasecure.audit.hdfs.config.encoding
+
+
+
+
+ xasecure.audit.hdfs.config.destination.directory
+ hdfs://NAMENODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%
+
+
+
+ xasecure.audit.hdfs.config.destination.file
+ %hostname%-audit.log
+
+
+
+ xasecure.audit.hdfs.config.destination.flush.interval.seconds
+ 900
+
+
+
+ xasecure.audit.hdfs.config.destination.rollover.interval.seconds
+ 86400
+
+
+
+ xasecure.audit.hdfs.config.destination.open.retry.interval.seconds
+ 60
+
+
+
+ xasecure.audit.hdfs.config.local.buffer.directory
+ /var/log/starrocks/audit
+
+
+
+ xasecure.audit.hdfs.config.local.buffer.file
+ %time:yyyyMMdd-HHmm.ss%.log
+
+
+
+ xasecure.audit.hdfs.config.local.buffer.file.buffer.size.bytes
+ 8192
+
+
+
+ xasecure.audit.hdfs.config.local.buffer.flush.interval.seconds
+ 60
+
+
+
+ xasecure.audit.hdfs.config.local.buffer.rollover.interval.seconds
+ 600
+
+
+
+ xasecure.audit.hdfs.config.local.archive.directory
+ /var/log/starrocks/audit/archive
+
+
+
+ xasecure.audit.hdfs.config.local.archive.max.file.count
+ 10
+
+
+
+ xasecure.audit.log4j.is.enabled
+ false
+
+
+
+ xasecure.audit.log4j.is.async
+ false
+
+
+
+ xasecure.audit.log4j.async.max.queue.size
+ 10240
+
+
+
+ xasecure.audit.log4j.async.max.flush.interval.ms
+ 30000
+
+
+
+
+
+ xasecure.audit.starrocks.is.enabled
+ true
+
+
+
+ xasecure.audit.starrocks.async.max.queue.size
+ 1
+
+
+
+ xasecure.audit.starrocks.async.max.flush.interval.ms
+ 1000
+
+
+
+ xasecure.audit.starrocks.broker_list
+ localhost:9092
+
+
+
+ xasecure.audit.starrocks.topic_name
+ ranger_audits
+
+
+
+
+ xasecure.audit.solr.is.enabled
+ true
+
+
+
+ xasecure.audit.solr.async.max.queue.size
+ 1
+
+
+
+ xasecure.audit.solr.async.max.flush.interval.ms
+ 100
+
+
+
+ xasecure.audit.solr.solr_url
+ http://127.0.0.1:6083/solr/ranger_audits
+
+
diff --git a/plugin-starrocks/conf/ranger-starrocks-security.xml b/plugin-starrocks/conf/ranger-starrocks-security.xml
new file mode 100644
index 0000000000..bdfe321f81
--- /dev/null
+++ b/plugin-starrocks/conf/ranger-starrocks-security.xml
@@ -0,0 +1,74 @@
+
+
+
+
+ ranger.plugin.starrocks.service.name
+ starrocks
+
+ Name of the Ranger service containing policies for this StarRocks instance
+
+
+
+
+ ranger.plugin.starrocks.policy.source.impl
+ org.apache.ranger.admin.client.RangerAdminRESTClient
+
+ Class to retrieve policies from the source
+
+
+
+
+ ranger.plugin.starrocks.policy.rest.url
+ http://localhost:6080
+
+ URL to Ranger Admin
+
+
+
+
+ ranger.plugin.starrocks.policy.rest.ssl.config.file
+ /etc/hadoop/conf/ranger-policymgr-ssl.xml
+
+ Path to the file containing SSL details to contact Ranger Admin
+
+
+
+
+ ranger.plugin.starrocks.policy.pollIntervalMs
+ 30000
+
+ How often to poll for changes in policies?
+
+
+
+
+ ranger.plugin.starrocks.policy.rest.client.connection.timeoutMs
+ 30000
+
+ StarRocks Plugin RangerRestClient Connection Timeout in Milli Seconds
+
+
+
+
+ ranger.plugin.starrocks.policy.rest.client.read.timeoutMs
+ 30000
+
+ StarRocks Plugin RangerRestClient read Timeout in Milli Seconds
+
+
+
diff --git a/plugin-starrocks/pom.xml b/plugin-starrocks/pom.xml
new file mode 100644
index 0000000000..f29e1bf256
--- /dev/null
+++ b/plugin-starrocks/pom.xml
@@ -0,0 +1,131 @@
+
+
+
+ 4.0.0
+ ranger-starrocks-plugin
+ StarRocks Security Plugin
+ StarRocks Security Plugin
+ jar
+
+ UTF-8
+
+
+ org.apache.ranger
+ ranger
+ 3.0.0-SNAPSHOT
+ ..
+
+
+
+ org.apache.httpcomponents
+ httpcore
+ ${httpcomponents.httpcore.version}
+
+
+ org.apache.zookeeper
+ zookeeper
+ ${zookeeper.version}
+
+
+ io.netty
+ netty
+
+
+ log4j
+ *
+
+
+ org.slf4j
+ *
+
+
+
+
+ org.slf4j
+ log4j-over-slf4j
+ ${slf4j.version}
+ test
+
+
+ org.apache.ranger
+ ranger-plugins-common
+ ${project.version}
+
+
+ org.apache.ranger
+ ranger-plugins-audit
+ ${project.version}
+
+
+ org.apache.commons
+ commons-lang3
+ ${commons.lang3.version}
+
+
+ junit
+ junit
+
+
+ org.apache.commons
+ commons-compress
+ ${commons.compress.version}
+
+
+ org.apache.htrace
+ htrace-core4
+ ${htrace-core.version}
+
+
+ org.slf4j
+ slf4j-api
+ ${slf4j.version}
+
+
+ org.slf4j
+ jcl-over-slf4j
+ ${slf4j.version}
+
+
+ ch.qos.logback
+ logback-classic
+ ${logback.version}
+
+
+ ch.qos.logback
+ logback-core
+ ${logback.version}
+
+
+ dnsjava
+ dnsjava
+ ${dnsjava.version}
+
+
+
+
+
+ src/test/resources
+
+ **/*
+
+ true
+
+
+
+
diff --git a/plugin-starrocks/src/dev-support/findbugsIncludeFile.xml b/plugin-starrocks/src/dev-support/findbugsIncludeFile.xml
new file mode 100644
index 0000000000..8623906bda
--- /dev/null
+++ b/plugin-starrocks/src/dev-support/findbugsIncludeFile.xml
@@ -0,0 +1,25 @@
+
+
+
+
+
+
+
+
diff --git a/plugin-starrocks/src/dev-support/ranger-pmd-ruleset.xml b/plugin-starrocks/src/dev-support/ranger-pmd-ruleset.xml
new file mode 100644
index 0000000000..9e5ed7a5db
--- /dev/null
+++ b/plugin-starrocks/src/dev-support/ranger-pmd-ruleset.xml
@@ -0,0 +1,121 @@
+
+
+
+
+ Apache Ranger - PMD rule set
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/RangerServiceStarRocks.java b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/RangerServiceStarRocks.java
new file mode 100644
index 0000000000..f086e6e965
--- /dev/null
+++ b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/RangerServiceStarRocks.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ranger.services.starrocks;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.ranger.plugin.client.HadoopConfigHolder;
+import org.apache.ranger.plugin.client.HadoopException;
+import org.apache.ranger.plugin.model.RangerPolicy;
+import org.apache.ranger.plugin.model.RangerPolicy.RangerPolicyItem;
+import org.apache.ranger.plugin.model.RangerPolicy.RangerPolicyItemAccess;
+import org.apache.ranger.plugin.service.RangerBaseService;
+import org.apache.ranger.plugin.service.ResourceLookupContext;
+import org.apache.ranger.services.starrocks.client.StarRocksResourceManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class RangerServiceStarRocks extends RangerBaseService {
+ private static final Logger LOG = LoggerFactory.getLogger(RangerServiceStarRocks.class);
+
+ public static final String ACCESS_TYPE_SELECT = "select";
+
+ @Override
+ public List getDefaultRangerPolicies() throws Exception {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("==> RangerServiceStarRocks.getDefaultRangerPolicies()");
+ }
+
+ List ret = super.getDefaultRangerPolicies();
+ for (RangerPolicy defaultPolicy : ret) {
+ if (defaultPolicy.getName().contains("all") && StringUtils.isNotBlank(lookUpUser)) {
+ List accessListForLookupUser = new ArrayList();
+ accessListForLookupUser.add(new RangerPolicyItemAccess(ACCESS_TYPE_SELECT));
+ RangerPolicyItem policyItemForLookupUser = new RangerPolicyItem();
+ policyItemForLookupUser.setUsers(Collections.singletonList(lookUpUser));
+ policyItemForLookupUser.setAccesses(accessListForLookupUser);
+ policyItemForLookupUser.setDelegateAdmin(false);
+ defaultPolicy.getPolicyItems().add(policyItemForLookupUser);
+ }
+ }
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== RangerServiceStarRocks.getDefaultRangerPolicies()");
+ }
+ return ret;
+ }
+
+ @Override
+ public Map validateConfig() throws Exception {
+ Map ret = new HashMap();
+ String serviceName = getServiceName();
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("RangerServiceStarRocks.validateConfig(): Service: " +
+ serviceName);
+ }
+
+ if (configs != null) {
+ try {
+ if (!configs.containsKey(HadoopConfigHolder.RANGER_LOGIN_PASSWORD)) {
+ configs.put(HadoopConfigHolder.RANGER_LOGIN_PASSWORD, null);
+ }
+ ret = StarRocksResourceManager.connectionTest(serviceName, configs);
+ } catch (HadoopException he) {
+ LOG.error("<== RangerServiceStarRocks.validateConfig() Error:" + he);
+ throw he;
+ }
+ }
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("RangerServiceStarRocks.validateConfig(): Response: " + ret);
+ }
+ return ret;
+ }
+
+ @Override
+ public List lookupResource(ResourceLookupContext context) throws Exception {
+
+ List ret = new ArrayList();
+ String serviceName = getServiceName();
+ String serviceType = getServiceType();
+ Map configs = getConfigs();
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("==> RangerServiceStarRocks.lookupResource() Context: (" + context + ")");
+ }
+ if (context != null) {
+ try {
+ if (!configs.containsKey(HadoopConfigHolder.RANGER_LOGIN_PASSWORD)) {
+ configs.put(HadoopConfigHolder.RANGER_LOGIN_PASSWORD, null);
+ }
+ ret = StarRocksResourceManager.getStarRocksResources(serviceName, serviceType, configs, context);
+ } catch (Exception e) {
+ LOG.error("<==RangerServiceStarRocks.lookupResource() Error : " + e);
+ throw e;
+ }
+ }
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== RangerServiceStarRocks.lookupResource() Response: (" + ret + ")");
+ }
+ return ret;
+ }
+
+}
diff --git a/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksClient.java b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksClient.java
new file mode 100644
index 0000000000..8e8aa8357d
--- /dev/null
+++ b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksClient.java
@@ -0,0 +1,575 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ranger.services.starrocks.client;
+
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.ranger.plugin.client.BaseClient;
+import org.apache.ranger.plugin.client.HadoopConfigHolder;
+import org.apache.ranger.plugin.client.HadoopException;
+import org.apache.ranger.plugin.util.PasswordUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.Closeable;
+import java.security.PrivilegedAction;
+import java.sql.Connection;
+import java.sql.Driver;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.SQLTimeoutException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import javax.security.auth.Subject;
+
+public class StarRocksClient extends BaseClient implements Closeable {
+ // JDBC connection-property keys used when opening the StarRocks connection.
+ public static final String STARROCKS_USER_NAME_PROP = "user";
+ public static final String STARROCKS_PASSWORD_PROP = "password";
+
+ private static final Logger LOG = LoggerFactory.getLogger(StarRocksClient.class);
+
+ // Suffix appended to every error message surfaced in the Ranger admin UI.
+ private static final String ERR_MSG = "You can still save the repository and start creating "
+ + "policies, but you would not be able to use autocomplete for "
+ + "resource names. Check ranger_admin.log for more info.";
+
+ // Shared JDBC connection; opened once in init() and closed via close().
+ private Connection con;
+
+ // Creates a client for the named service; configuration is loaded by BaseClient.
+ public StarRocksClient(String serviceName) throws Exception {
+ super(serviceName, null);
+ init();
+ }
+
+ // Creates a client with explicit connection properties (used by connection tests).
+ public StarRocksClient(String serviceName, Map properties) throws Exception {
+ super(serviceName, properties);
+ init();
+ }
+
+ // Opens the JDBC connection while running as the configured login subject.
+ private void init() throws Exception {
+ Subject.doAs(getLoginSubject(), new PrivilegedAction() {
+ public Void run() {
+ initConnection();
+ return null;
+ }
+ });
+ }
+
+ // Registers the configured JDBC driver (if any) and connects to the configured
+ // URL. Every failure mode is wrapped in a HadoopException carrying a
+ // UI-friendly message plus ERR_MSG.
+ private void initConnection() {
+ Properties prop = getConfigHolder().getRangerSection();
+ String driverClassName = prop.getProperty("jdbc.driverClassName");
+ String url = prop.getProperty("jdbc.url");
+
+ Properties starrocksProperties = new Properties();
+ String decryptedPwd = null;
+ try {
+ decryptedPwd = PasswordUtils.decryptPassword(getConfigHolder().getPassword());
+ } catch (Exception ex) {
+ LOG.info("Password decryption failed");
+ decryptedPwd = null;
+ } finally {
+ // Fall back to the plain-text configured password when decryption
+ // failed or produced nothing.
+ if (decryptedPwd == null) {
+ decryptedPwd = prop.getProperty(HadoopConfigHolder.RANGER_LOGIN_PASSWORD);
+ }
+ }
+ starrocksProperties.put(STARROCKS_USER_NAME_PROP, prop.getProperty(HadoopConfigHolder.RANGER_LOGIN_USER_NAME_PROP));
+ // Only set a password when one was configured; Properties rejects null values.
+ if (prop.getProperty(HadoopConfigHolder.RANGER_LOGIN_PASSWORD) != null) {
+ starrocksProperties.put(STARROCKS_PASSWORD_PROP, decryptedPwd);
+ }
+
+ // Driver registration is optional: when no class name is configured we rely
+ // on DriverManager's own driver discovery.
+ if (driverClassName != null) {
+ try {
+ Driver driver = (Driver) Class.forName(driverClassName).newInstance();
+ DriverManager.registerDriver(driver);
+ } catch (SQLException e) {
+ String msgDesc = "initConnection: Caught SQLException while registering"
+ + " the StarRocks driver.";
+ HadoopException hdpException = new HadoopException(msgDesc, e);
+ hdpException.generateResponseDataMap(false, getMessage(e),
+ msgDesc + ERR_MSG, null, null);
+ throw hdpException;
+ } catch (IllegalAccessException ilae) {
+ String msgDesc = "initConnection: Class or its nullary constructor might not accessible.";
+ HadoopException hdpException = new HadoopException(msgDesc, ilae);
+ hdpException.generateResponseDataMap(false, getMessage(ilae),
+ msgDesc + ERR_MSG, null, null);
+ throw hdpException;
+ } catch (InstantiationException ie) {
+ String msgDesc = "initConnection: Class may not have its nullary constructor or "
+ + "may be the instantiation fails for some other reason.";
+ HadoopException hdpException = new HadoopException(msgDesc, ie);
+ hdpException.generateResponseDataMap(false, getMessage(ie),
+ msgDesc + ERR_MSG, null, null);
+ throw hdpException;
+ } catch (ExceptionInInitializerError eie) {
+ String msgDesc = "initConnection: Got ExceptionInInitializerError, "
+ + "The initialization provoked by this method fails.";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ eie);
+ hdpException.generateResponseDataMap(false, getMessage(eie),
+ msgDesc + ERR_MSG, null, null);
+ throw hdpException;
+ } catch (SecurityException se) {
+ String msgDesc = "initConnection: unable to initiate connection to StarRocks instance,"
+ + " The caller's class loader is not the same as or an ancestor "
+ + "of the class loader for the current class and invocation of "
+ + "s.checkPackageAccess() denies access to the package of this class.";
+ HadoopException hdpException = new HadoopException(msgDesc, se);
+ hdpException.generateResponseDataMap(false, getMessage(se),
+ msgDesc + ERR_MSG, null, null);
+ throw hdpException;
+ } catch (Throwable t) {
+ String msgDesc = "initConnection: Unable to connect to StarRocks instance, "
+ + "please provide valid value of field : {jdbc.driverClassName}.";
+ HadoopException hdpException = new HadoopException(msgDesc, t);
+ hdpException.generateResponseDataMap(false, getMessage(t),
+ msgDesc + ERR_MSG, null, null);
+ throw hdpException;
+ }
+ }
+
+ try {
+ con = DriverManager.getConnection(url, starrocksProperties);
+ } catch (SQLException e) {
+ String msgDesc = "Unable to connect to StarRocks instance.";
+ HadoopException hdpException = new HadoopException(msgDesc, e);
+ hdpException.generateResponseDataMap(false, getMessage(e),
+ msgDesc + ERR_MSG, null, null);
+ throw hdpException;
+ } catch (SecurityException se) {
+ String msgDesc = "Unable to connect to StarRocks instance.";
+ HadoopException hdpException = new HadoopException(msgDesc, se);
+ hdpException.generateResponseDataMap(false, getMessage(se),
+ msgDesc + ERR_MSG, null, null);
+ throw hdpException;
+ } catch (Throwable t) {
+ String msgDesc = "initConnection: Unable to connect to StarRocks instance, ";
+ HadoopException hdpException = new HadoopException(msgDesc, t);
+ hdpException.generateResponseDataMap(false, getMessage(t),
+ msgDesc + ERR_MSG, null, null);
+ throw hdpException;
+ }
+
+ }
+
+    /**
+     * Lists catalog names visible on the current connection, optionally
+     * filtered by a leading-match needle and excluding names already present
+     * in {@code catalogs}.
+     *
+     * @param needle   prefix filter; null, empty, or "*" means no filter
+     * @param catalogs already-selected catalog names to exclude (may be null)
+     * @return matching catalog names; never null
+     * @throws HadoopException if the query fails or times out
+     */
+    private List<String> getCatalogs(String needle, List<String> catalogs) throws HadoopException {
+        List<String> ret = new ArrayList<>();
+        if (con != null) {
+            Statement stat = null;
+            ResultSet rs = null;
+            String sql = "SHOW CATALOGS";
+
+            try {
+                if (needle != null && !needle.isEmpty() && !needle.equals("*")) {
+                    // Cannot use a prepared statement for this as StarRocks does not support that.
+                    sql += " LIKE '" + StringEscapeUtils.escapeSql(needle) + "%'";
+                }
+                stat = con.createStatement();
+                rs = stat.executeQuery(sql);
+                while (rs.next()) {
+                    String catalogName = rs.getString(1);
+                    if (catalogs != null && catalogs.contains(catalogName)) {
+                        continue;
+                    }
+                    ret.add(catalogName);
+                }
+            } catch (SQLTimeoutException sqlt) {
+                String msgDesc = "Time Out, Unable to execute SQL [" + sql
+                        + "].";
+                HadoopException hdpException = new HadoopException(msgDesc,
+                        sqlt);
+                hdpException.generateResponseDataMap(false, getMessage(sqlt),
+                        msgDesc + ERR_MSG, null, null);
+                // Fix: this exception was previously constructed but never thrown,
+                // silently returning an empty/partial catalog list on timeout.
+                throw hdpException;
+            } catch (SQLException se) {
+                String msg = "Unable to execute SQL [" + sql + "]. ";
+                HadoopException he = new HadoopException(msg, se);
+                he.generateResponseDataMap(false, getMessage(se), msg + ERR_MSG,
+                        null, null);
+                throw he;
+            } finally {
+                close(rs);
+                close(stat);
+            }
+        }
+        return ret;
+    }
+
+    /**
+     * Fetches the catalog list while running as the configured login subject.
+     *
+     * @param needle   prefix filter passed through to getCatalogs
+     * @param catalogs already-selected catalogs to exclude
+     * @return catalog names matching the filter
+     * @throws HadoopException if the underlying lookup fails
+     */
+    public List<String> getCatalogList(String needle, final List<String> catalogs) throws HadoopException {
+        return Subject.doAs(getLoginSubject(), (PrivilegedAction<List<String>>) () -> {
+            try {
+                return getCatalogs(needle, catalogs);
+            } catch (HadoopException he) {
+                LOG.error("<== StarRocksClient.getCatalogList() :Unable to get the Database List", he);
+                throw he;
+            }
+        });
+    }
+
+ // Lists database (schema) names under each of the given catalogs via
+ // "SHOW DATABASES FROM <catalog>", optionally prefix-filtered by needle
+ // ("*"/empty/null means no filter) and excluding names already in schemas.
+ // Throws HadoopException on query failure or timeout.
+ private List getDatabases(String needle, List catalogs, List schemas) throws HadoopException {
+ List ret = new ArrayList<>();
+ if (con != null) {
+ Statement stat = null;
+ ResultSet rs = null;
+ String sql = null;
+
+ try {
+ if (catalogs != null && !catalogs.isEmpty()) {
+ for (String catalog : catalogs) {
+ sql = "SHOW DATABASES FROM `" + StringEscapeUtils.escapeSql(catalog) + "`";
+
+ try {
+ if (needle != null && !needle.isEmpty() && !needle.equals("*")) {
+ sql += " LIKE '" + StringEscapeUtils.escapeSql(needle) + "%'";
+ }
+ stat = con.createStatement();
+ rs = stat.executeQuery(sql);
+ while (rs.next()) {
+ String schema = rs.getString(1);
+ if (schemas != null && schemas.contains(schema)) {
+ continue;
+ }
+ ret.add(schema);
+ }
+ } finally {
+ // Close per-catalog resources before the next iteration and
+ // null them out so a later failure cannot double-close.
+ close(rs);
+ close(stat);
+ rs = null;
+ stat = null;
+ }
+ }
+ }
+ } catch (SQLTimeoutException sqlt) {
+ String msgDesc = "Time Out, Unable to execute SQL [" + sql
+ + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqlt);
+ hdpException.generateResponseDataMap(false, getMessage(sqlt),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getSchemas() Error : ", sqlt);
+ }
+ throw hdpException;
+ } catch (SQLException sqle) {
+ String msgDesc = "Unable to execute SQL [" + sql + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqle);
+ hdpException.generateResponseDataMap(false, getMessage(sqle),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getSchemas() Error : ", sqle);
+ }
+ throw hdpException;
+ }
+ }
+
+ return ret;
+ }
+
+ public List getDatabaseList(String needle, List catalogs, List schemas) throws HadoopException {
+ final String ndl = needle;
+ final List cats = catalogs;
+ final List shms = schemas;
+
+ List schemaList = Subject.doAs(getLoginSubject(), new PrivilegedAction>() {
+ @Override
+ public List run() {
+ List ret = null;
+ try {
+ ret = getDatabases(ndl, cats, shms);
+ } catch (HadoopException he) {
+ LOG.error("<== StarRocksClient.getSchemaList() :Unable to get the Schema List", he);
+ }
+ return ret;
+ }
+ });
+
+ return schemaList;
+ }
+
+ // Lists table names for every (catalog, schema) pair via
+ // "SHOW tables FROM `catalog`.`schema`", optionally prefix-filtered by
+ // needle ("*"/empty/null means no filter) and excluding names already in
+ // tables. Returns an empty list when either catalogs or schemas is empty.
+ // Throws HadoopException on query failure or timeout.
+ private List getTables(String needle, List catalogs, List schemas, List tables)
+ throws HadoopException {
+ List ret = new ArrayList<>();
+ if (con != null) {
+ Statement stat = null;
+ ResultSet rs = null;
+ String sql = null;
+
+ if (catalogs != null && !catalogs.isEmpty()
+ && schemas != null && !schemas.isEmpty()) {
+ try {
+ for (String catalog : catalogs) {
+ for (String schema : schemas) {
+ sql = "SHOW tables FROM `" + StringEscapeUtils.escapeSql(catalog) + "`.`" +
+ StringEscapeUtils.escapeSql(schema) + "`";
+ try {
+ if (needle != null && !needle.isEmpty() && !needle.equals("*")) {
+ sql += " LIKE '" + StringEscapeUtils.escapeSql(needle) + "%'";
+ }
+ stat = con.createStatement();
+ rs = stat.executeQuery(sql);
+ while (rs.next()) {
+ String table = rs.getString(1);
+ if (tables != null && tables.contains(table)) {
+ continue;
+ }
+ ret.add(table);
+ }
+ } finally {
+ // Close per-pair resources before the next iteration and
+ // null them out so a later failure cannot double-close.
+ close(rs);
+ close(stat);
+ rs = null;
+ stat = null;
+ }
+ }
+ }
+ } catch (SQLTimeoutException sqlt) {
+ String msgDesc = "Time Out, Unable to execute SQL [" + sql
+ + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqlt);
+ hdpException.generateResponseDataMap(false, getMessage(sqlt),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqlt);
+ }
+ throw hdpException;
+ } catch (SQLException sqle) {
+ String msgDesc = "Unable to execute SQL [" + sql + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqle);
+ hdpException.generateResponseDataMap(false, getMessage(sqle),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqle);
+ }
+ throw hdpException;
+ }
+ }
+ }
+ return ret;
+ }
+
+ public List getTableList(String needle, List catalogs, List schemas, List tables)
+ throws HadoopException {
+ final String ndl = needle;
+ final List cats = catalogs;
+ final List shms = schemas;
+ final List tbls = tables;
+
+ List tableList = Subject.doAs(getLoginSubject(), new PrivilegedAction>() {
+ @Override
+ public List run() {
+ List ret = null;
+ try {
+ ret = getTables(ndl, cats, shms, tbls);
+ } catch (HadoopException he) {
+ LOG.error("<== StarRocksClient.getTableList() :Unable to get the Column List", he);
+ throw he;
+ }
+ return ret;
+ }
+ });
+
+ return tableList;
+ }
+
+ // Lists column names for every (catalog, schema, table) triple via
+ // "SHOW COLUMNS FROM `catalog`.`schema`.`table`", filtering client-side with
+ // a wildcard match on needle (unlike the other lookups, which filter in SQL)
+ // and excluding names already in columns. Returns an empty list when any of
+ // catalogs/schemas/tables is empty. Throws HadoopException on failure.
+ private List getColumns(String needle, List catalogs, List schemas, List tables,
+ List columns) throws HadoopException {
+ List ret = new ArrayList<>();
+ if (con != null) {
+ String regex = null;
+ ResultSet rs = null;
+ String sql = null;
+ Statement stat = null;
+
+ // Needle is applied as a filename-style wildcard pattern after the query.
+ if (needle != null && !needle.isEmpty()) {
+ regex = needle;
+ }
+
+ if (catalogs != null && !catalogs.isEmpty()
+ && schemas != null && !schemas.isEmpty()
+ && tables != null && !tables.isEmpty()) {
+ try {
+ for (String catalog : catalogs) {
+ for (String schema : schemas) {
+ for (String table : tables) {
+ sql = "SHOW COLUMNS FROM `" + StringEscapeUtils.escapeSql(catalog) + "`." +
+ "`" + StringEscapeUtils.escapeSql(schema) + "`." +
+ "`" + StringEscapeUtils.escapeSql(table) + "`";
+
+ try {
+ stat = con.createStatement();
+ rs = stat.executeQuery(sql);
+ while (rs.next()) {
+ String column = rs.getString(1);
+ if (columns != null && columns.contains(column)) {
+ continue;
+ }
+ if (regex == null) {
+ ret.add(column);
+ } else if (FilenameUtils.wildcardMatch(column, regex)) {
+ ret.add(column);
+ }
+ }
+ } finally {
+ // Close per-table resources before the next iteration and
+ // null them out so a later failure cannot double-close.
+ close(rs);
+ close(stat);
+ stat = null;
+ rs = null;
+ }
+ }
+ }
+ }
+ } catch (SQLTimeoutException sqlt) {
+ String msgDesc = "Time Out, Unable to execute SQL [" + sql
+ + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqlt);
+ hdpException.generateResponseDataMap(false, getMessage(sqlt),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getColumns() Error : ", sqlt);
+ }
+ throw hdpException;
+ } catch (SQLException sqle) {
+ String msgDesc = "Unable to execute SQL [" + sql + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqle);
+ hdpException.generateResponseDataMap(false, getMessage(sqle),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getColumns() Error : ", sqle);
+ }
+ throw hdpException;
+ }
+ }
+ }
+ return ret;
+ }
+
+    /**
+     * Fetches column names for the given catalog/schema/table selections while
+     * running as the configured login subject.
+     *
+     * @throws HadoopException if the underlying lookup fails
+     */
+    public List<String> getColumnList(String needle, List<String> catalogs, List<String> schemas, List<String> tables,
+                                      List<String> columns) throws HadoopException {
+        return Subject.doAs(getLoginSubject(), (PrivilegedAction<List<String>>) () -> {
+            try {
+                return getColumns(needle, catalogs, schemas, tables, columns);
+            } catch (HadoopException he) {
+                LOG.error("<== StarRocksClient.getColumnList() :Unable to get the Column List", he);
+                throw he;
+            }
+        });
+    }
+
+    /**
+     * Opens a throw-away client for {@code serviceName} and verifies that at
+     * least one catalog is visible; backs the admin UI "Test Connection"
+     * button. Connection failures propagate as exceptions from the client
+     * constructor or the catalog query.
+     *
+     * @return response map; populated with a success message when a catalog
+     *         was found, otherwise left empty
+     * @throws Exception if connecting or querying fails
+     */
+    public static Map<String, Object> connectionTest(String serviceName,
+                                                     Map<String, String> connectionProperties)
+            throws Exception {
+        StarRocksClient client = null;
+        Map<String, Object> resp = new HashMap<>();
+
+        try {
+            // Removed the no-op catch(Exception){throw e;} and the dead
+            // client != null check right after construction.
+            client = new StarRocksClient(serviceName, connectionProperties);
+            List<String> testResult = client.getCatalogList("*", null);
+            if (testResult != null && !testResult.isEmpty()) {
+                String msg = "Connection test successful";
+                generateResponseDataMap(true, msg, msg, null, null, resp);
+            }
+        } finally {
+            if (client != null) {
+                client.close();
+            }
+        }
+
+        return resp;
+    }
+
+    /** Closes the underlying JDBC connection, running as the login subject. */
+    public void close() {
+        Subject.doAs(getLoginSubject(), (PrivilegedAction<Void>) () -> {
+            close(con);
+            return null;
+        });
+    }
+
+    /** Quietly closes a JDBC connection, logging (not propagating) failures. */
+    private void close(Connection con) {
+        if (con == null) {
+            return;
+        }
+        try {
+            con.close();
+        } catch (SQLException e) {
+            LOG.error("Unable to close StarRocks SQL connection", e);
+        }
+    }
+
+    /** Quietly closes a Statement, logging (not propagating) failures. */
+    public void close(Statement stat) {
+        if (stat == null) {
+            return;
+        }
+        try {
+            stat.close();
+        } catch (SQLException e) {
+            LOG.error("Unable to close SQL statement", e);
+        }
+    }
+
+    /** Quietly closes a ResultSet, logging (not propagating) failures. */
+    public void close(ResultSet rs) {
+        if (rs == null) {
+            return;
+        }
+        try {
+            rs.close();
+        } catch (SQLException e) {
+            LOG.error("Unable to close ResultSet", e);
+        }
+    }
+}
diff --git a/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksConnectionManager.java b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksConnectionManager.java
new file mode 100644
index 0000000000..26fbcbb8f7
--- /dev/null
+++ b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksConnectionManager.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ranger.services.starrocks.client;
+
+import org.apache.ranger.plugin.util.TimedEventUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.TimeUnit;
+
+public class StarRocksConnectionManager {
+ private static final Logger LOG = LoggerFactory.getLogger(StarRocksConnectionManager.class);
+
+ // Cache of live clients keyed by Ranger service name.
+ protected ConcurrentMap starrocksConnectionCache;
+ // Records a successful connect per service (written here, not read here).
+ protected ConcurrentMap repoConnectStatusMap;
+
+ public StarRocksConnectionManager() {
+ starrocksConnectionCache = new ConcurrentHashMap<>();
+ repoConnectStatusMap = new ConcurrentHashMap<>();
+ }
+
+ // Returns a cached client for the service, creating one (with a 5-second
+ // connect timeout) on a cache miss. A cached client is validated with a
+ // cheap catalog query; on validation failure it is evicted, closed, and the
+ // lookup retries recursively. Returns null when connecting fails.
+ public StarRocksClient getStarRocksConnection(final String serviceName, final String serviceType,
+ final Map configs) {
+ StarRocksClient starrocksClient = null;
+
+ if (serviceType != null) {
+ starrocksClient = starrocksConnectionCache.get(serviceName);
+ if (starrocksClient == null) {
+ if (configs != null) {
+ final Callable connectStarRocks = new Callable() {
+ @Override
+ public StarRocksClient call() throws Exception {
+ return new StarRocksClient(serviceName, configs);
+ }
+ };
+ try {
+ starrocksClient = TimedEventUtil.timedTask(connectStarRocks, 5, TimeUnit.SECONDS);
+ } catch (Exception e) {
+ LOG.error("Error connecting to StarRocks repository: " +
+ serviceName + " using config: " + configs, e);
+ }
+
+ // Handle the race where another thread cached a client first:
+ // keep the winner's client and close the one we just opened.
+ StarRocksClient oldClient = null;
+ if (starrocksClient != null) {
+ oldClient = starrocksConnectionCache.putIfAbsent(serviceName, starrocksClient);
+ } else {
+ oldClient = starrocksConnectionCache.get(serviceName);
+ }
+
+ if (oldClient != null) {
+ if (starrocksClient != null) {
+ starrocksClient.close();
+ }
+ starrocksClient = oldClient;
+ }
+ repoConnectStatusMap.put(serviceName, true);
+ } else {
+ LOG.error("Connection Config not defined for asset :"
+ + serviceName, new Throwable());
+ }
+ } else {
+ try {
+ // Validate the cached connection with a cheap catalog query.
+ starrocksClient.getCatalogList("*", null);
+ } catch (Exception e) {
+ // Stale/broken connection: evict, close, and reconnect.
+ starrocksConnectionCache.remove(serviceName);
+ starrocksClient.close();
+ starrocksClient = getStarRocksConnection(serviceName, serviceType, configs);
+ }
+ }
+ } else {
+ LOG.error("Asset not found with name " + serviceName, new Throwable());
+ }
+ return starrocksClient;
+ }
+}
diff --git a/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceManager.java b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceManager.java
new file mode 100644
index 0000000000..89f938d744
--- /dev/null
+++ b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceManager.java
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ranger.services.starrocks.client;
+
+import org.apache.ranger.plugin.service.ResourceLookupContext;
+import org.apache.ranger.plugin.util.TimedEventUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+
+public class StarRocksResourceManager {
+ private static final Logger LOG = LoggerFactory.getLogger(StarRocksResourceManager.class);
+
+ // Resource-type keys as declared in the StarRocks service definition.
+ private static final String CATALOG = "catalog";
+ private static final String DATABASE = "database";
+ private static final String TABLE = "table";
+ private static final String COLUMN = "column";
+
+ // Delegates the admin "Test Connection" action to StarRocksClient, logging
+ // entry/exit at debug level and rethrowing any failure.
+ public static Map connectionTest(String serviceName, Map configs) throws Exception {
+ Map ret = null;
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("==> StarRocksResourceManager.connectionTest() ServiceName: " + serviceName + " Configs: " + configs);
+ }
+
+ try {
+ ret = StarRocksClient.connectionTest(serviceName, configs);
+ } catch (Exception e) {
+ LOG.error("<== StarRocksResourceManager.connectionTest() Error: " + e);
+ throw e;
+ }
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksResourceManager.connectionTest() Result : " + ret);
+ }
+
+ return ret;
+ }
+
+ public static List getStarRocksResources(String serviceName, String serviceType, Map configs,
+ ResourceLookupContext context) throws Exception {
+
+ String userInput = context.getUserInput();
+ String resource = context.getResourceName();
+ Map> resourceMap = context.getResources();
+ List resultList = null;
+ List catalogList = null;
+ List databaseList = null;
+ List tableList = null;
+ List columnList = null;
+
+ String catalogName = null;
+ String databaseName = null;
+ String tableName = null;
+ String columnName = null;
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksResourceManager.getStarRocksResources() UserInput: \"" + userInput + "\" resource : " + resource +
+ " resourceMap: " + resourceMap);
+ }
+
+ LOG.error("UserInput : " + userInput + " ," + "Resource : " + resource);
+
+ if (userInput == null) {
+ LOG.error("UserInput is null");
+ }
+
+ if (userInput == null || userInput.isEmpty()) {
+ userInput = "*";
+ }
+
+ if (resource != null) {
+ if (resourceMap != null && !resourceMap.isEmpty()) {
+ catalogList = resourceMap.get(CATALOG);
+ databaseList = resourceMap.get(DATABASE);
+ tableList = resourceMap.get(TABLE);
+ columnList = resourceMap.get(COLUMN);
+ }
+ switch (resource.trim().toLowerCase()) {
+ case CATALOG:
+ catalogName = userInput;
+ break;
+ case DATABASE:
+ databaseName = userInput;
+ case TABLE:
+ tableName = userInput;
+ break;
+ case COLUMN:
+ columnName = userInput;
+ break;
+ default:
+ break;
+ }
+ }
+
+ if (serviceName != null) {
+ try {
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(
+ "==> StarRocksResourceManager.getStarRocksResources() UserInput: \"" + userInput + "\" configs: " + configs +
+ " catalogList: " + catalogList + " tableList: "
+ + tableList + " columnList: " + columnList);
+ }
+
+ final StarRocksClient starRocksClient =
+ new StarRocksConnectionManager().getStarRocksConnection(serviceName, serviceType, configs);
+
+ Callable> callableObj = null;
+
+ final String finalCatalogName;
+ final String finalSchemaName;
+ final String finalTableName;
+ final String finalColumnName;
+
+ final List finalCatalogList = catalogList;
+ final List finalSchemaList = databaseList;
+ final List finalTableList = tableList;
+ final List finalColumnList = columnList;
+
+ if (starRocksClient != null) {
+ if (catalogName != null) {
+ finalCatalogName = catalogName;
+ callableObj = new Callable>() {
+ @Override
+ public List call() throws Exception {
+ return starRocksClient.getCatalogList(finalCatalogName, finalCatalogList);
+ }
+ };
+ } else if (databaseName != null) {
+ finalSchemaName = databaseName;
+ callableObj = new Callable>() {
+ @Override
+ public List call() throws Exception {
+ return starRocksClient.getDatabaseList(finalSchemaName, finalCatalogList, finalSchemaList);
+ }
+ };
+ } else if (tableName != null) {
+ finalTableName = tableName;
+ callableObj = new Callable>() {
+ @Override
+ public List call() throws Exception {
+ return starRocksClient.getTableList(finalTableName, finalCatalogList, finalSchemaList,
+ finalTableList);
+ }
+ };
+ } else if (columnName != null) {
+ finalColumnName = columnName;
+ callableObj = new Callable>() {
+ @Override
+ public List call() throws Exception {
+ return starRocksClient.getColumnList(finalColumnName, finalCatalogList, finalSchemaList, finalTableList, finalColumnList);
+ }
+ };
+ }
+ if (callableObj != null) {
+ synchronized (starRocksClient) {
+ resultList = TimedEventUtil.timedTask(callableObj, 5, TimeUnit.SECONDS);
+ }
+ } else {
+ LOG.error("Could not initiate a StarRocks timedTask");
+ }
+ }
+ } catch (Exception e) {
+ LOG.error("Unable to get StarRocks resource", e);
+ throw e;
+ }
+ }
+ return resultList;
+ }
+}
diff --git a/pom.xml b/pom.xml
index 9b6f5e62bf..ca76ccc8a2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -293,6 +293,7 @@
plugin-nifi
plugin-nifi-registry
plugin-presto
+ plugin-starrocks
plugin-kudu
ugsync-util
ugsync
@@ -364,6 +365,7 @@
plugin-nifi-registry
plugin-presto
plugin-trino
+ plugin-starrocks
plugin-kudu
ugsync-util
ugsync
@@ -649,6 +651,19 @@
ranger-trino-plugin-shim
+
+ ranger-starrocks-plugin
+
+ agents-audit
+ agents-common
+ agents-cred
+ agents-installer
+ credentialbuilder
+ ranger-plugin-classloader
+ ranger-util
+ plugin-starrocks
+
+
ranger-nestedstructure-plugin
@@ -723,6 +738,7 @@
ranger-kylin-plugin-shim
plugin-presto
ranger-presto-plugin-shim
+ plugin-starrocks
plugin-elasticsearch
ranger-elasticsearch-plugin-shim
ranger-authn
@@ -780,6 +796,7 @@
plugin-nifi
plugin-nifi-registry
plugin-presto
+ plugin-starrocks
ugsync-util
ugsync
ugsync/ldapconfigchecktool/ldapconfigcheck
From c3eb7f0b7020bb09f9c9d9951068d2c19c295200 Mon Sep 17 00:00:00 2001
From: HangyuanLiu <460660596@qq.com>
Date: Fri, 15 Sep 2023 14:25:09 +0800
Subject: [PATCH 2/4] Support Ranger StarRocks Plugin
---
.../starrocks/client/StarRocksClient.java | 477 ++++++++++++++++++
.../client/StarRocksConnectionManager.java | 9 +
.../client/StarRocksResourceManager.java | 224 +++++++-
.../client/StarRocksResourceType.java | 30 ++
4 files changed, 728 insertions(+), 12 deletions(-)
create mode 100644 plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceType.java
diff --git a/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksClient.java b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksClient.java
index 8e8aa8357d..a2a5fbb98c 100644
--- a/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksClient.java
+++ b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksClient.java
@@ -500,6 +500,483 @@ public List run() {
return columnList;
}
+ public List getViewList(String needle, List catalogs, List schemas, List tables)
+ throws HadoopException {
+ List ret = new ArrayList<>();
+ if (con != null) {
+ Statement stat = null;
+ ResultSet rs = null;
+ String sql = null;
+
+ if (catalogs != null && !catalogs.isEmpty()
+ && schemas != null && !schemas.isEmpty()) {
+ try {
+ for (String catalog : catalogs) {
+ for (String schema : schemas) {
+ try {
+ sql = "SELECT TABLE_NAME FROM information_schema.views WHERE TABLE_SCHEMA = '"
+ + StringEscapeUtils.escapeSql(schema) + "'";
+
+ if (needle != null && !needle.isEmpty() && !needle.equals("*")) {
+ sql += " AND TABLE_NAME LIKE '" + StringEscapeUtils.escapeSql(needle) + "%'";
+ }
+ stat = con.createStatement();
+ rs = stat.executeQuery(sql);
+ while (rs.next()) {
+ String table = rs.getString(1);
+ if (tables != null && tables.contains(table)) {
+ continue;
+ }
+ ret.add(table);
+ }
+ } finally {
+ close(rs);
+ close(stat);
+ rs = null;
+ stat = null;
+ }
+ }
+ }
+ } catch (SQLTimeoutException sqlt) {
+ String msgDesc = "Time Out, Unable to execute SQL [" + sql
+ + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqlt);
+ hdpException.generateResponseDataMap(false, getMessage(sqlt),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqlt);
+ }
+ throw hdpException;
+ } catch (SQLException sqle) {
+ String msgDesc = "Unable to execute SQL [" + sql + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqle);
+ hdpException.generateResponseDataMap(false, getMessage(sqle),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqle);
+ }
+ throw hdpException;
+ }
+ }
+ }
+ return ret;
+ }
+
+ public List getMaterializedViewList(String needle, List catalogs, List schemas, List tables)
+ throws HadoopException {
+ List ret = new ArrayList<>();
+ if (con != null) {
+ Statement stat = null;
+ ResultSet rs = null;
+ String sql = null;
+
+ if (catalogs != null && !catalogs.isEmpty()
+ && schemas != null && !schemas.isEmpty()) {
+ try {
+ for (String catalog : catalogs) {
+ for (String schema : schemas) {
+ try {
+ sql = "SELECT TABLE_NAME FROM information_schema.materialized_views WHERE TABLE_SCHEMA = '"
+ + StringEscapeUtils.escapeSql(schema) + "'";
+
+ if (needle != null && !needle.isEmpty() && !needle.equals("*")) {
+ sql += " AND TABLE_NAME LIKE '" + StringEscapeUtils.escapeSql(needle) + "%'";
+ }
+ stat = con.createStatement();
+ rs = stat.executeQuery(sql);
+ while (rs.next()) {
+ String table = rs.getString(1);
+ if (tables != null && tables.contains(table)) {
+ continue;
+ }
+ ret.add(table);
+ }
+ } finally {
+ close(rs);
+ close(stat);
+ rs = null;
+ stat = null;
+ }
+ }
+ }
+ } catch (SQLTimeoutException sqlt) {
+ String msgDesc = "Time Out, Unable to execute SQL [" + sql
+ + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqlt);
+ hdpException.generateResponseDataMap(false, getMessage(sqlt),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqlt);
+ }
+ throw hdpException;
+ } catch (SQLException sqle) {
+ String msgDesc = "Unable to execute SQL [" + sql + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqle);
+ hdpException.generateResponseDataMap(false, getMessage(sqle),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqle);
+ }
+ throw hdpException;
+ }
+ }
+ }
+ return ret;
+ }
+
+ public List getFunctionList(String needle, List catalogs, List schemas, List tables)
+ throws HadoopException {
+ List ret = new ArrayList<>();
+ if (con != null) {
+ Statement stat = null;
+ ResultSet rs = null;
+ String sql = null;
+
+ if (catalogs != null && !catalogs.isEmpty()
+ && schemas != null && !schemas.isEmpty()) {
+ try {
+ for (String catalog : catalogs) {
+ for (String schema : schemas) {
+ try {
+ sql = "SHOW FULL FUNCTIONS FROM `" + schema + "`";
+
+ if (needle != null && !needle.isEmpty() && !needle.equals("*")) {
+ sql += " LIKE '" + StringEscapeUtils.escapeSql(needle) + "%'";
+ }
+ stat = con.createStatement();
+ rs = stat.executeQuery(sql);
+ while (rs.next()) {
+ String table = rs.getString(1);
+ if (tables != null && tables.contains(table)) {
+ continue;
+ }
+ ret.add(table);
+ }
+ } finally {
+ close(rs);
+ close(stat);
+ rs = null;
+ stat = null;
+ }
+ }
+ }
+ } catch (SQLTimeoutException sqlt) {
+ String msgDesc = "Time Out, Unable to execute SQL [" + sql
+ + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqlt);
+ hdpException.generateResponseDataMap(false, getMessage(sqlt),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqlt);
+ }
+ throw hdpException;
+ } catch (SQLException sqle) {
+ String msgDesc = "Unable to execute SQL [" + sql + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqle);
+ hdpException.generateResponseDataMap(false, getMessage(sqle),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqle);
+ }
+ throw hdpException;
+ }
+ }
+ }
+ return ret;
+ }
+
+ public List getFunctionList(String needle, List functions)
+ throws HadoopException {
+ List ret = new ArrayList<>();
+ if (con != null) {
+ Statement stat = null;
+ ResultSet rs = null;
+ String sql = null;
+
+ try {
+ try {
+ sql = "SHOW FULL GLOBAL FUNCTIONS";
+
+ if (needle != null && !needle.isEmpty() && !needle.equals("*")) {
+ sql += " LIKE '" + StringEscapeUtils.escapeSql(needle) + "%'";
+ }
+ stat = con.createStatement();
+ rs = stat.executeQuery(sql);
+ while (rs.next()) {
+ String table = rs.getString(1);
+ if (functions != null && functions.contains(table)) {
+ continue;
+ }
+ ret.add(table);
+ }
+ } finally {
+ close(rs);
+ close(stat);
+ rs = null;
+ stat = null;
+ }
+ } catch (SQLTimeoutException sqlt) {
+ String msgDesc = "Time Out, Unable to execute SQL [" + sql
+ + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqlt);
+ hdpException.generateResponseDataMap(false, getMessage(sqlt),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqlt);
+ }
+ throw hdpException;
+ } catch (SQLException sqle) {
+ String msgDesc = "Unable to execute SQL [" + sql + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqle);
+ hdpException.generateResponseDataMap(false, getMessage(sqle),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqle);
+ }
+ throw hdpException;
+ }
+ }
+ return ret;
+ }
+
+ public List getResourceList(String needle, List functions)
+ throws HadoopException {
+ List ret = new ArrayList<>();
+ if (con != null) {
+ Statement stat = null;
+ ResultSet rs = null;
+ String sql = null;
+
+ try {
+ try {
+ sql = "SHOW RESOURCES";
+ /*
+ if (needle != null && !needle.isEmpty() && !needle.equals("*")) {
+ sql += " LIKE '" + StringEscapeUtils.escapeSql(needle) + "%'";
+ }
+
+ */
+ stat = con.createStatement();
+ rs = stat.executeQuery(sql);
+ while (rs.next()) {
+ String table = rs.getString(1);
+ if (functions != null && functions.contains(table)) {
+ continue;
+ }
+ ret.add(table);
+ }
+ } finally {
+ close(rs);
+ close(stat);
+ rs = null;
+ stat = null;
+ }
+ } catch (SQLTimeoutException sqlt) {
+ String msgDesc = "Time Out, Unable to execute SQL [" + sql
+ + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqlt);
+ hdpException.generateResponseDataMap(false, getMessage(sqlt),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqlt);
+ }
+ throw hdpException;
+ } catch (SQLException sqle) {
+ String msgDesc = "Unable to execute SQL [" + sql + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqle);
+ hdpException.generateResponseDataMap(false, getMessage(sqle),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqle);
+ }
+ throw hdpException;
+ }
+ }
+ return ret;
+ }
+
+ public List getResourceGroupList(String needle, List functions)
+ throws HadoopException {
+ List ret = new ArrayList<>();
+ if (con != null) {
+ Statement stat = null;
+ ResultSet rs = null;
+ String sql = null;
+
+ try {
+ try {
+ sql = "SHOW RESOURCE GROUPS";
+ /*
+ if (needle != null && !needle.isEmpty() && !needle.equals("*")) {
+ sql += " LIKE '" + StringEscapeUtils.escapeSql(needle) + "%'";
+ }
+
+ */
+ stat = con.createStatement();
+ rs = stat.executeQuery(sql);
+ while (rs.next()) {
+ String table = rs.getString(1);
+ if (functions != null && functions.contains(table)) {
+ continue;
+ }
+ ret.add(table);
+ }
+ } finally {
+ close(rs);
+ close(stat);
+ rs = null;
+ stat = null;
+ }
+ } catch (SQLTimeoutException sqlt) {
+ String msgDesc = "Time Out, Unable to execute SQL [" + sql
+ + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqlt);
+ hdpException.generateResponseDataMap(false, getMessage(sqlt),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqlt);
+ }
+ throw hdpException;
+ } catch (SQLException sqle) {
+ String msgDesc = "Unable to execute SQL [" + sql + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqle);
+ hdpException.generateResponseDataMap(false, getMessage(sqle),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqle);
+ }
+ throw hdpException;
+ }
+ }
+ return ret;
+ }
+
+ public List getStorageVolumeList(String needle, List functions)
+ throws HadoopException {
+ List ret = new ArrayList<>();
+ if (con != null) {
+ Statement stat = null;
+ ResultSet rs = null;
+ String sql = null;
+
+ try {
+ try {
+ sql = "SHOW STORAGE VOLUMES";
+
+ if (needle != null && !needle.isEmpty() && !needle.equals("*")) {
+ sql += " LIKE '" + StringEscapeUtils.escapeSql(needle) + "%'";
+ }
+ stat = con.createStatement();
+ rs = stat.executeQuery(sql);
+ while (rs.next()) {
+ String table = rs.getString(1);
+ if (functions != null && functions.contains(table)) {
+ continue;
+ }
+ ret.add(table);
+ }
+ } finally {
+ close(rs);
+ close(stat);
+ rs = null;
+ stat = null;
+ }
+ } catch (SQLTimeoutException sqlt) {
+ String msgDesc = "Time Out, Unable to execute SQL [" + sql
+ + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqlt);
+ hdpException.generateResponseDataMap(false, getMessage(sqlt),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqlt);
+ }
+ throw hdpException;
+ } catch (SQLException sqle) {
+ String msgDesc = "Unable to execute SQL [" + sql + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqle);
+ hdpException.generateResponseDataMap(false, getMessage(sqle),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqle);
+ }
+ throw hdpException;
+ }
+ }
+ return ret;
+ }
+
+ public List getUserList(String needle, List users)
+ throws HadoopException {
+ List ret = new ArrayList<>();
+ if (con != null) {
+ Statement stat = null;
+ ResultSet rs = null;
+ String sql = null;
+
+ try {
+ try {
+ sql = "show users";
+ /*
+ if (needle != null && !needle.isEmpty() && !needle.equals("*")) {
+ sql += " LIKE '" + StringEscapeUtils.escapeSql(needle) + "%'";
+ }
+ */
+ stat = con.createStatement();
+ rs = stat.executeQuery(sql);
+ while (rs.next()) {
+ String originUser = rs.getString(1);
+ String user = originUser.split("@")[0].replace("'", "");
+ if (users != null && users.contains(user)) {
+ continue;
+ }
+ ret.add(user);
+ }
+ } finally {
+ close(rs);
+ close(stat);
+ rs = null;
+ stat = null;
+ }
+ } catch (SQLTimeoutException sqlt) {
+ String msgDesc = "Time Out, Unable to execute SQL [" + sql
+ + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqlt);
+ hdpException.generateResponseDataMap(false, getMessage(sqlt),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqlt);
+ }
+ throw hdpException;
+ } catch (SQLException sqle) {
+ String msgDesc = "Unable to execute SQL [" + sql + "].";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ sqle);
+ hdpException.generateResponseDataMap(false, getMessage(sqle),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksClient.getTables() Error : ", sqle);
+ }
+ throw hdpException;
+ }
+ }
+ return ret;
+ }
+
public static Map connectionTest(String serviceName,
Map connectionProperties)
throws Exception {
diff --git a/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksConnectionManager.java b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksConnectionManager.java
index 26fbcbb8f7..ea1717dd7b 100644
--- a/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksConnectionManager.java
+++ b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksConnectionManager.java
@@ -53,6 +53,8 @@ public StarRocksClient call() throws Exception {
return new StarRocksClient(serviceName, configs);
}
};
+
+ LOG.error("start");
try {
starrocksClient = TimedEventUtil.timedTask(connectStarRocks, 5, TimeUnit.SECONDS);
} catch (Exception e) {
@@ -60,6 +62,13 @@ public StarRocksClient call() throws Exception {
serviceName + " using config: " + configs, e);
}
+ LOG.error("end");
+ if (starrocksClient == null) {
+ LOG.error("null");
+ } else {
+ LOG.error("not null");
+ }
+
StarRocksClient oldClient = null;
if (starrocksClient != null) {
oldClient = starrocksConnectionCache.putIfAbsent(serviceName, starrocksClient);
diff --git a/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceManager.java b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceManager.java
index 89f938d744..1a65ff1e4f 100644
--- a/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceManager.java
+++ b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceManager.java
@@ -28,14 +28,22 @@
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
+import static org.apache.ranger.services.starrocks.client.StarRocksResourceType.CATALOG;
+import static org.apache.ranger.services.starrocks.client.StarRocksResourceType.COLUMN;
+import static org.apache.ranger.services.starrocks.client.StarRocksResourceType.DATABASE;
+import static org.apache.ranger.services.starrocks.client.StarRocksResourceType.FUNCTION;
+import static org.apache.ranger.services.starrocks.client.StarRocksResourceType.GLOBAL_FUNCTION;
+import static org.apache.ranger.services.starrocks.client.StarRocksResourceType.MATERIALIZED_VIEW;
+import static org.apache.ranger.services.starrocks.client.StarRocksResourceType.RESOURCE;
+import static org.apache.ranger.services.starrocks.client.StarRocksResourceType.RESOURCE_GROUP;
+import static org.apache.ranger.services.starrocks.client.StarRocksResourceType.STORAGE_VOLUME;
+import static org.apache.ranger.services.starrocks.client.StarRocksResourceType.TABLE;
+import static org.apache.ranger.services.starrocks.client.StarRocksResourceType.USER;
+import static org.apache.ranger.services.starrocks.client.StarRocksResourceType.VIEW;
+
public class StarRocksResourceManager {
private static final Logger LOG = LoggerFactory.getLogger(StarRocksResourceManager.class);
- private static final String CATALOG = "catalog";
- private static final String DATABASE = "database";
- private static final String TABLE = "table";
- private static final String COLUMN = "column";
-
public static Map connectionTest(String serviceName, Map configs) throws Exception {
Map ret = null;
@@ -57,8 +65,8 @@ public static Map connectionTest(String serviceName, Map getStarRocksResources(String serviceName, String serviceType, Map configs,
- ResourceLookupContext context) throws Exception {
+ public static List getStarRocksResources2(String serviceName, String serviceType, Map configs,
+ ResourceLookupContext context) throws Exception {
String userInput = context.getUserInput();
String resource = context.getResourceName();
@@ -75,7 +83,8 @@ public static List getStarRocksResources(String serviceName, String serv
String columnName = null;
if (LOG.isDebugEnabled()) {
- LOG.debug("<== StarRocksResourceManager.getStarRocksResources() UserInput: \"" + userInput + "\" resource : " + resource +
+ LOG.debug("<== StarRocksResourceManager.getStarRocksResources() UserInput: \"" + userInput + "\" resource : " +
+ resource +
" resourceMap: " + resourceMap);
}
@@ -118,7 +127,8 @@ public static List getStarRocksResources(String serviceName, String serv
if (LOG.isDebugEnabled()) {
LOG.debug(
- "==> StarRocksResourceManager.getStarRocksResources() UserInput: \"" + userInput + "\" configs: " + configs +
+ "==> StarRocksResourceManager.getStarRocksResources() UserInput: \"" + userInput + "\" configs: " +
+ configs +
" catalogList: " + catalogList + " tableList: "
+ tableList + " columnList: " + columnList);
}
@@ -169,14 +179,15 @@ public List call() throws Exception {
callableObj = new Callable>() {
@Override
public List call() throws Exception {
- return starRocksClient.getColumnList(finalColumnName, finalCatalogList, finalSchemaList, finalTableList, finalColumnList);
+ return starRocksClient.getColumnList(finalColumnName, finalCatalogList, finalSchemaList,
+ finalTableList, finalColumnList);
}
};
}
if (callableObj != null) {
- synchronized (starRocksClient) {
+ //synchronized (starRocksClient) {
resultList = TimedEventUtil.timedTask(callableObj, 5, TimeUnit.SECONDS);
- }
+ //}
} else {
LOG.error("Could not initiate a StarRocks timedTask");
}
@@ -188,4 +199,193 @@ public List call() throws Exception {
}
return resultList;
}
+
+ public static List getStarRocksResources(String serviceName, String serviceType, Map configs,
+ ResourceLookupContext context) throws Exception {
+
+ String userInput = context.getUserInput();
+ if (userInput == null || userInput.isEmpty()) {
+ userInput = "*";
+ }
+
+ String resourceName = context.getResourceName().trim().toLowerCase();
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== StarRocksResourceManager.getStarRocksResources() UserInput: \"" + userInput + "\" resourceName : " +
+ resourceName + " resourceMap: " + context.getResources());
+ }
+
+ LOG.error("<== StarRocksResourceManager.getStarRocksResources() UserInput: \"" + userInput + "\" resourceName : " +
+ resourceName + " resourceMap: " + context.getResources());
+
+ List resultList = null;
+ try {
+ final StarRocksClient starRocksClient =
+ new StarRocksConnectionManager().getStarRocksConnection(serviceName, serviceType, configs);
+
+ Callable> callableObj = null;
+ if (starRocksClient != null) {
+ switch (resourceName) {
+ case CATALOG: {
+ Map> resourceMap = context.getResources();
+ List catalogList = resourceMap.get(CATALOG);
+ String catalogName = userInput;
+
+ callableObj = () -> starRocksClient.getCatalogList(catalogName, catalogList);
+ }
+ break;
+ case DATABASE: {
+ Map> resourceMap = context.getResources();
+ List catalogList = resourceMap.get(CATALOG);
+ List databaseList = resourceMap.get(DATABASE);
+ String dbName = userInput;
+
+ callableObj = () -> starRocksClient.getDatabaseList(dbName, catalogList, databaseList);
+ }
+ break;
+ case TABLE: {
+ Map> resourceMap = context.getResources();
+ List catalogList = resourceMap.get(CATALOG);
+ List databaseList = resourceMap.get(DATABASE);
+ List tableList = resourceMap.get(TABLE);
+ String tableName = userInput;
+
+ callableObj = () -> starRocksClient.getTableList(tableName, catalogList, databaseList, tableList);
+ }
+ break;
+ case COLUMN: {
+ Map> resourceMap = context.getResources();
+ List catalogList = resourceMap.get(CATALOG);
+ List databaseList = resourceMap.get(DATABASE);
+ List tableList = resourceMap.get(TABLE);
+ List columnList = resourceMap.get(COLUMN);
+ String columnName = userInput;
+
+ callableObj = () -> starRocksClient.getColumnList(columnName, catalogList, databaseList, tableList,
+ columnList);
+ }
+ break;
+ case VIEW: {
+ Map> resourceMap = context.getResources();
+ List catalogList = resourceMap.get(CATALOG);
+ List databaseList = resourceMap.get(DATABASE);
+ List viewList = resourceMap.get(VIEW);
+ String viewName = userInput;
+
+ callableObj = () -> starRocksClient.getViewList(viewName, catalogList, databaseList, viewList);
+ }
+ break;
+ case MATERIALIZED_VIEW: {
+ Map> resourceMap = context.getResources();
+ List catalogList = resourceMap.get(CATALOG);
+ List databaseList = resourceMap.get(DATABASE);
+ List mvList = resourceMap.get(MATERIALIZED_VIEW);
+ String mvName = userInput;
+
+ callableObj = () -> starRocksClient.getMaterializedViewList(mvName, catalogList, databaseList, mvList);
+ }
+ break;
+ case FUNCTION: {
+ Map> resourceMap = context.getResources();
+ List catalogList = resourceMap.get(CATALOG);
+ List databaseList = resourceMap.get(DATABASE);
+ List mvList = resourceMap.get(FUNCTION);
+ String functionName = userInput;
+
+ callableObj = () -> starRocksClient.getFunctionList(functionName, catalogList, databaseList, mvList);
+ }
+ break;
+ case GLOBAL_FUNCTION: {
+ Map> resourceMap = context.getResources();
+ List globalFunctionList = resourceMap.get(GLOBAL_FUNCTION);
+ String functionName = userInput;
+ callableObj = () -> starRocksClient.getFunctionList(functionName, globalFunctionList);
+ }
+ break;
+ case RESOURCE: {
+ Map> resourceMap = context.getResources();
+ List globalFunctionList = resourceMap.get(RESOURCE);
+ String functionName = userInput;
+ callableObj = () -> starRocksClient.getResourceList(functionName, globalFunctionList);
+ }
+ break;
+ case RESOURCE_GROUP: {
+ Map> resourceMap = context.getResources();
+ List globalFunctionList = resourceMap.get(RESOURCE_GROUP);
+ String functionName = userInput;
+ callableObj = () -> starRocksClient.getResourceGroupList(functionName, globalFunctionList);
+ }
+ break;
+ case STORAGE_VOLUME: {
+ Map> resourceMap = context.getResources();
+ List globalFunctionList = resourceMap.get(STORAGE_VOLUME);
+ String functionName = userInput;
+ callableObj = () -> starRocksClient.getStorageVolumeList(functionName, globalFunctionList);
+ }
+ break;
+ case USER: {
+ Map> resourceMap = context.getResources();
+ List globalFunctionList = resourceMap.get(USER);
+ String functionName = userInput;
+ callableObj = () -> starRocksClient.getUserList(functionName, globalFunctionList);
+ }
+ break;
+
+ default:
+ break;
+ }
+
+ if (callableObj != null) {
+ synchronized (starRocksClient) {
+ resultList = TimedEventUtil.timedTask(callableObj, 5, TimeUnit.SECONDS);
+ }
+ } else {
+ LOG.error("Could not initiate a StarRocks timedTask");
+ }
+ }
+ } catch (Exception e) {
+ LOG.error("Unable to get StarRocks resource", e);
+ throw e;
+ }
+
+ return resultList;
+ }
+
+ private static Callable> getCatalogResource(StarRocksClient starRocksClient, ResourceLookupContext context,
+ String catalogName) {
+ Map> resourceMap = context.getResources();
+ List catalogList = resourceMap.get(CATALOG);
+
+ return () -> starRocksClient.getCatalogList(catalogName, catalogList);
+ }
+
+ private static Callable> getDatabaseResource(StarRocksClient starRocksClient, ResourceLookupContext context,
+ String dbName) {
+ Map> resourceMap = context.getResources();
+ List catalogList = resourceMap.get(CATALOG);
+ List databaseList = resourceMap.get(DATABASE);
+
+ return () -> starRocksClient.getDatabaseList(dbName, catalogList, databaseList);
+ }
+
+ private static Callable> getTableResource(StarRocksClient starRocksClient, ResourceLookupContext context,
+ String tableName) {
+ Map> resourceMap = context.getResources();
+ List catalogList = resourceMap.get(CATALOG);
+ List databaseList = resourceMap.get(DATABASE);
+ List tableList = resourceMap.get(TABLE);
+
+ return () -> starRocksClient.getTableList(tableName, catalogList, databaseList, tableList);
+ }
+
+ private static Callable> getColumnResource(StarRocksClient starRocksClient, ResourceLookupContext context,
+ String columnName) {
+ Map> resourceMap = context.getResources();
+ List catalogList = resourceMap.get(CATALOG);
+ List databaseList = resourceMap.get(DATABASE);
+ List tableList = resourceMap.get(TABLE);
+ List columnList = resourceMap.get(COLUMN);
+
+ return () -> starRocksClient.getColumnList(columnName, catalogList, databaseList, tableList, columnList);
+ }
}
diff --git a/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceType.java b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceType.java
new file mode 100644
index 0000000000..bca3af3fda
--- /dev/null
+++ b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceType.java
@@ -0,0 +1,30 @@
+// Copyright 2021-present StarRocks, Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package org.apache.ranger.services.starrocks.client;
+
/**
 * Resource-type names used in the StarRocks service definition and in
 * {@code ResourceLookupContext} resource maps. Pure constants holder.
 */
public final class StarRocksResourceType {
    public static final String CATALOG = "catalog";
    public static final String DATABASE = "database";
    public static final String TABLE = "table";
    public static final String COLUMN = "column";
    public static final String VIEW = "view";
    public static final String MATERIALIZED_VIEW = "materialized_view";
    public static final String FUNCTION = "function";
    public static final String GLOBAL_FUNCTION = "global_function";
    public static final String RESOURCE = "resource";
    public static final String RESOURCE_GROUP = "resource_group";
    public static final String STORAGE_VOLUME = "storage_volume";
    public static final String USER = "user";
    public static final String SYSTEM = "system";

    // Utility class: not instantiable.
    private StarRocksResourceType() {
    }
}
From 06c1ed8de109953bf7f45642d1ed33dd64e584be Mon Sep 17 00:00:00 2001
From: HangyuanLiu <460660596@qq.com>
Date: Fri, 15 Sep 2023 14:25:09 +0800
Subject: [PATCH 3/4] Support Ranger StarRocks Plugin
---
.../client/StarRocksResourceManager.java | 178 +-----------------
1 file changed, 1 insertion(+), 177 deletions(-)
diff --git a/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceManager.java b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceManager.java
index 1a65ff1e4f..816e396f5f 100644
--- a/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceManager.java
+++ b/plugin-starrocks/src/main/java/org/apache/ranger/services/starrocks/client/StarRocksResourceManager.java
@@ -45,7 +45,7 @@ public class StarRocksResourceManager {
private static final Logger LOG = LoggerFactory.getLogger(StarRocksResourceManager.class);
public static Map connectionTest(String serviceName, Map configs) throws Exception {
- Map ret = null;
+ Map ret;
if (LOG.isDebugEnabled()) {
LOG.debug("==> StarRocksResourceManager.connectionTest() ServiceName: " + serviceName + " Configs: " + configs);
@@ -65,141 +65,6 @@ public static Map connectionTest(String serviceName, Map getStarRocksResources2(String serviceName, String serviceType, Map configs,
- ResourceLookupContext context) throws Exception {
-
- String userInput = context.getUserInput();
- String resource = context.getResourceName();
- Map> resourceMap = context.getResources();
- List resultList = null;
- List catalogList = null;
- List databaseList = null;
- List tableList = null;
- List columnList = null;
-
- String catalogName = null;
- String databaseName = null;
- String tableName = null;
- String columnName = null;
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("<== StarRocksResourceManager.getStarRocksResources() UserInput: \"" + userInput + "\" resource : " +
- resource +
- " resourceMap: " + resourceMap);
- }
-
- LOG.error("UserInput : " + userInput + " ," + "Resource : " + resource);
-
- if (userInput == null) {
- LOG.error("UserInput is null");
- }
-
- if (userInput == null || userInput.isEmpty()) {
- userInput = "*";
- }
-
- if (resource != null) {
- if (resourceMap != null && !resourceMap.isEmpty()) {
- catalogList = resourceMap.get(CATALOG);
- databaseList = resourceMap.get(DATABASE);
- tableList = resourceMap.get(TABLE);
- columnList = resourceMap.get(COLUMN);
- }
- switch (resource.trim().toLowerCase()) {
- case CATALOG:
- catalogName = userInput;
- break;
- case DATABASE:
- databaseName = userInput;
- case TABLE:
- tableName = userInput;
- break;
- case COLUMN:
- columnName = userInput;
- break;
- default:
- break;
- }
- }
-
- if (serviceName != null) {
- try {
-
- if (LOG.isDebugEnabled()) {
- LOG.debug(
- "==> StarRocksResourceManager.getStarRocksResources() UserInput: \"" + userInput + "\" configs: " +
- configs +
- " catalogList: " + catalogList + " tableList: "
- + tableList + " columnList: " + columnList);
- }
-
- final StarRocksClient starRocksClient =
- new StarRocksConnectionManager().getStarRocksConnection(serviceName, serviceType, configs);
-
- Callable> callableObj = null;
-
- final String finalCatalogName;
- final String finalSchemaName;
- final String finalTableName;
- final String finalColumnName;
-
- final List finalCatalogList = catalogList;
- final List finalSchemaList = databaseList;
- final List finalTableList = tableList;
- final List finalColumnList = columnList;
-
- if (starRocksClient != null) {
- if (catalogName != null) {
- finalCatalogName = catalogName;
- callableObj = new Callable>() {
- @Override
- public List call() throws Exception {
- return starRocksClient.getCatalogList(finalCatalogName, finalCatalogList);
- }
- };
- } else if (databaseName != null) {
- finalSchemaName = databaseName;
- callableObj = new Callable>() {
- @Override
- public List call() throws Exception {
- return starRocksClient.getDatabaseList(finalSchemaName, finalCatalogList, finalSchemaList);
- }
- };
- } else if (tableName != null) {
- finalTableName = tableName;
- callableObj = new Callable>() {
- @Override
- public List call() throws Exception {
- return starRocksClient.getTableList(finalTableName, finalCatalogList, finalSchemaList,
- finalTableList);
- }
- };
- } else if (columnName != null) {
- finalColumnName = columnName;
- callableObj = new Callable>() {
- @Override
- public List call() throws Exception {
- return starRocksClient.getColumnList(finalColumnName, finalCatalogList, finalSchemaList,
- finalTableList, finalColumnList);
- }
- };
- }
- if (callableObj != null) {
- //synchronized (starRocksClient) {
- resultList = TimedEventUtil.timedTask(callableObj, 5, TimeUnit.SECONDS);
- //}
- } else {
- LOG.error("Could not initiate a StarRocks timedTask");
- }
- }
- } catch (Exception e) {
- LOG.error("Unable to get StarRocks resource", e);
- throw e;
- }
- }
- return resultList;
- }
-
public static List getStarRocksResources(String serviceName, String serviceType, Map configs,
ResourceLookupContext context) throws Exception {
@@ -215,9 +80,6 @@ public static List getStarRocksResources(String serviceName, String serv
resourceName + " resourceMap: " + context.getResources());
}
- LOG.error("<== StarRocksResourceManager.getStarRocksResources() UserInput: \"" + userInput + "\" resourceName : " +
- resourceName + " resourceMap: " + context.getResources());
-
List resultList = null;
try {
final StarRocksClient starRocksClient =
@@ -350,42 +212,4 @@ public static List getStarRocksResources(String serviceName, String serv
return resultList;
}
-
- private static Callable> getCatalogResource(StarRocksClient starRocksClient, ResourceLookupContext context,
- String catalogName) {
- Map> resourceMap = context.getResources();
- List catalogList = resourceMap.get(CATALOG);
-
- return () -> starRocksClient.getCatalogList(catalogName, catalogList);
- }
-
- private static Callable> getDatabaseResource(StarRocksClient starRocksClient, ResourceLookupContext context,
- String dbName) {
- Map> resourceMap = context.getResources();
- List catalogList = resourceMap.get(CATALOG);
- List databaseList = resourceMap.get(DATABASE);
-
- return () -> starRocksClient.getDatabaseList(dbName, catalogList, databaseList);
- }
-
- private static Callable> getTableResource(StarRocksClient starRocksClient, ResourceLookupContext context,
- String tableName) {
- Map> resourceMap = context.getResources();
- List catalogList = resourceMap.get(CATALOG);
- List