Commit d4641e2
add CI yaml
1 parent 09a1712 commit d4641e2

4 files changed: +130 −12 lines changed
@@ -0,0 +1,110 @@
+name: Authorization Integration Test
+
+# Controls when the workflow will run
+on:
+  # Triggers the workflow on push or pull request events, but only for the "main" and "branch-*" branches
+  push:
+    branches: [ "main", "branch-*" ]
+  pull_request:
+    branches: [ "main", "branch-*" ]
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  changes:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dorny/paths-filter@v2
+        id: filter
+        with:
+          filters: |
+            source_changes:
+              - .github/**
+              - api/**
+              - authorizations/**
+              - bin/**
+              - catalogs/**
+              - clients/client-java/**
+              - clients/client-java-runtime/**
+              - clients/filesystem-hadoop3/**
+              - clients/filesystem-hadoop3-runtime/**
+              - common/**
+              - conf/**
+              - core/**
+              - gradle/**
+              - integration-test-common/**
+              - iceberg/**
+              - meta/**
+              - server/**
+              - server-common/**
+              - trino-connector/**
+              - web/**
+              - build.gradle.kts
+              - gradle.properties
+              - gradlew
+              - settings.gradle.kts
+    outputs:
+      source_changes: ${{ steps.filter.outputs.source_changes }}
+
+  # Integration test for AMD64 architecture
+  test-amd64-arch:
+    needs: changes
+    if: needs.changes.outputs.source_changes == 'true'
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    strategy:
+      matrix:
+        # Integration test for AMD64 architecture
+        architecture: [ linux/amd64 ]
+        java-version: [ 8, 11, 17 ]
+        test-mode: [ embedded, deploy ]
+        include:
+          - test-mode: 'embedded'
+            backend: 'h2'
+          - test-mode: 'deploy'
+            backend: 'mysql'
+
+    env:
+      PLATFORM: ${{ matrix.architecture }}
+    steps:
+      - uses: actions/checkout@v3
+
+      - uses: actions/setup-java@v4
+        with:
+          java-version: ${{ matrix.java-version }}
+          distribution: 'temurin'
+          cache: 'gradle'
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+
+      - name: Check required command
+        run: |
+          dev/ci/check_commands.sh
+
+      - name: Package Gravitino
+        if: ${{ matrix.test-mode == 'deploy' }}
+        run: |
+          ./gradlew compileDistribution -x test -PjdkVersion=${{ matrix.java-version }}
+
+      - name: Free up disk space
+        run: |
+          dev/ci/util_free_space.sh
+
+      - name: Authorization Integration Test (JDK${{ matrix.java-version }}-${{ matrix.test-mode }}-${{ matrix.backend }})
+        id: integrationTest
+        run: |
+          ./gradlew -PskipTests -PtestMode=${{ matrix.test-mode }} -PjdbcBackend=${{ matrix.backend }} -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:authorization-ranger:test --tests "org.apache.gravitino.authorization.ranger.integration.test.**"
+
+      - name: Upload integration test reports
+        uses: actions/upload-artifact@v3
+        if: ${{ (failure() && steps.integrationTest.outcome == 'failure') || contains(github.event.pull_request.labels.*.name, 'upload log') }}
+        with:
+          name: authorizations-integrate-test-reports-${{ matrix.java-version }}
+          path: |
+            build/reports
+            distribution/package/logs/gravitino-server.out
+            distribution/package/logs/gravitino-server.log
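
The final Gradle step above narrows the run with --tests "org.apache.gravitino.authorization.ranger.integration.test.**"; Gradle matches that pattern against fully qualified class names, and the trailing ** wildcard selects everything under the package. A minimal sketch of a JUnit 5 class the filter would pick up; the class name is hypothetical, only the package comes from the command above:

package org.apache.gravitino.authorization.ranger.integration.test;

import static org.junit.jupiter.api.Assertions.assertTrue;

import org.junit.jupiter.api.Test;

// Hypothetical class, shown only to illustrate test selection: anything in this
// package matches --tests "org.apache.gravitino.authorization.ranger.integration.test.**".
public class ExampleSelectionIT {

  @Test
  void isSelectedByTheTestsFilter() {
    // A real IT would exercise the Ranger container; this only checks the
    // fully qualified name that the Gradle filter matches against.
    assertTrue(
        getClass().getName().startsWith("org.apache.gravitino.authorization.ranger.integration.test."));
  }
}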

.github/workflows/backend-integration-test.yml (+1)

@@ -102,6 +102,7 @@ jobs:
           -x :web:test -x :clients:client-python:test -x :flink-connector:test -x :spark-connector:test -x :spark-connector:spark-common:test
           -x :spark-connector:spark-3.3:test -x :spark-connector:spark-3.4:test -x :spark-connector:spark-3.5:test
           -x :spark-connector:spark-runtime-3.3:test -x :spark-connector:spark-runtime-3.4:test -x :spark-connector:spark-runtime-3.5:test
+          -x :authorizations:authorization-ranger:test

       - name: Upload integration test reports
         uses: actions/upload-artifact@v3

The Ranger module's tests are excluded here because the new Authorization Integration Test workflow above now runs them on their own.

authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java (+5 −6)

@@ -94,7 +94,11 @@ public class RangerHiveE2EIT extends AbstractIT {
   private static String HIVE_METASTORE_URIS;

   @BeforeAll
-  public static void setup() throws Exception {
+  public static void startIntegrationTest() throws Exception {
+    Map<String, String> configs = Maps.newHashMap();
+    configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
+    configs.put(Configs.SERVICE_ADMINS.getKey(), AuthConstants.ANONYMOUS_USER);
+    registerCustomConfigs(configs);
     AbstractIT.startIntegrationTest();

     RangerITEnv.setup();
@@ -105,11 +109,6 @@ public static void setup() throws Exception {
         containerSuite.getHiveContainer().getContainerIpAddress(),
         HiveContainer.HIVE_METASTORE_PORT);

-    Map<String, String> configs = Maps.newHashMap();
-    configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
-    configs.put(Configs.SERVICE_ADMINS.getKey(), AuthConstants.ANONYMOUS_USER);
-    registerCustomConfigs(configs);
-
     createMetalake();
     createCatalogAndRangerAuthPlugin();
     createSchema();
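
The point of the move above is ordering: registerCustomConfigs stages configuration that the embedded Gravitino server reads when AbstractIT.startIntegrationTest() boots it, so registering ENABLE_AUTHORIZATION and SERVICE_ADMINS afterwards came too late to take effect. A minimal sketch of that stage-then-snapshot pattern; the class and config key are hypothetical, not the AbstractIT API:

import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch of why registerCustomConfigs must precede server start:
// the "server" snapshots the staged configs once, at boot.
public class ConfigOrderingSketch {
  private static final Map<String, String> staged = new HashMap<>();
  private static Map<String, String> effective;

  static void registerCustomConfigs(Map<String, String> configs) {
    staged.putAll(configs); // staged only; nothing reads this until boot
  }

  static void startIntegrationTest() {
    effective = new HashMap<>(staged); // boot-time snapshot
  }

  public static void main(String[] args) {
    Map<String, String> configs = new HashMap<>();
    configs.put("enable.authorization", "true"); // placeholder key, not Gravitino's
    registerCustomConfigs(configs);
    startIntegrationTest(); // snapshot taken here, so the flag is honored
    System.out.println(effective); // prints {enable.authorization=true}
  }
}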

authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java (+14 −6)

@@ -54,17 +54,23 @@ public class RangerITEnv {
   protected static final String RANGER_HDFS_REPO_NAME = "hdfsDev";
   private static final String RANGER_HDFS_TYPE = "hdfs";
   protected static RangerClient rangerClient;
-
+  private static volatile Boolean initRangerService = Boolean.FALSE;
   private static final ContainerSuite containerSuite = ContainerSuite.getInstance();

   public static void setup() {
     containerSuite.startRangerContainer();
     rangerClient = containerSuite.getRangerContainer().rangerClient;
-    // No IP address set, no impact on testing
-    createRangerHdfsRepository("", true);
-    createRangerHiveRepository("", true);
-    allowAnyoneAccessHDFS();
-    allowAnyoneAccessInformationSchema();
+
+    if (initRangerService.equals(Boolean.FALSE)) {
+      synchronized (RangerITEnv.class) {
+        // No IP address set, no impact on testing
+        createRangerHdfsRepository("", true);
+        createRangerHiveRepository("", true);
+        allowAnyoneAccessHDFS();
+        allowAnyoneAccessInformationSchema();
+        initRangerService = Boolean.TRUE;
+      }
+    }
   }

   public static void cleanup() {
@@ -91,6 +97,7 @@ static void allowAnyoneAccessHDFS() {
       }
     } catch (RangerServiceException e) {
       // If the policy doesn't exist, we will create it
+      LOG.warn("Error while fetching policy: {}", e.getMessage());
     }

     Map<String, RangerPolicy.RangerPolicyResource> policyResourceMap =
@@ -122,6 +129,7 @@ static void allowAnyoneAccessInformationSchema() {
       }
     } catch (RangerServiceException e) {
       // If the policy doesn't exist, we will create it
+      LOG.warn("Error while fetching policy: {}", e.getMessage());
     }

     Map<String, RangerPolicy.RangerPolicyResource> policyResourceMap =
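
One caveat on the initRangerService guard added above: the flag is tested outside the synchronized block and never re-checked inside, so two threads that both observe FALSE will each run the repository setup in turn once they acquire the lock; that is harmless only if the calls are idempotent. The classic double-checked form re-tests the flag under the lock. A minimal sketch, with a hypothetical stand-in for the Ranger init calls:

// Minimal double-checked locking sketch: the flag is re-checked inside the
// synchronized block, so a thread that lost the race skips the init entirely.
public final class OnceInitializer {
  private static volatile boolean initialized = false;

  public static void setup() {
    if (!initialized) {                    // fast path, no lock once initialized
      synchronized (OnceInitializer.class) {
        if (!initialized) {                // re-check under the lock
          createRepositoriesAndPolicies(); // hypothetical stand-in for the Ranger init calls
          initialized = true;              // volatile write publishes the init
        }
      }
    }
  }

  private static void createRepositoriesAndPolicies() {
    // stand-in for createRangerHdfsRepository, createRangerHiveRepository, etc.
  }
}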
