@@ -96,14 +96,13 @@ project.extra["extraJvmArgs"] = if (extra["jdkVersion"] in listOf("8", "11")) {
     "--add-opens", "java.base/java.util=ALL-UNNAMED",
     "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED",
     "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED",
-    "--add-opens", "java.sql/java.sql=ALL-UNNAMED",
     "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED",
     "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED",
     "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED",
     "--add-opens", "java.base/sun.security.action=ALL-UNNAMED",
     "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED",
     "--add-opens", "java.security.jgss/sun.security.krb5=ALL-UNNAMED",
-    "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED"
+    "--add-opens", "java.sql/java.sql=ALL-UNNAMED",
   )
 }
 
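Note on the hunk above: the `--add-opens` pairs are JPMS flags that open JDK-internal packages to classpath code (the unnamed module), which JDK 17+ blocks by default; the edit drops the `java.base/java.lang.reflect` entry and moves `java.sql/java.sql` to the end of the list. How `extraJvmArgs` is consumed sits outside this diff; a minimal sketch, assuming the property is forwarded to test JVMs:

```kotlin
// Hypothetical consumer of the extraJvmArgs extra property (not part of
// this diff): forward the JPMS --add-opens flags to every Test task so
// reflective access to JDK internals keeps working on JDK 17+.
tasks.withType<Test>().configureEach {
  @Suppress("UNCHECKED_CAST")
  jvmArgs(project.extra["extraJvmArgs"] as List<String>)
}
```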
@@ -325,16 +324,16 @@ subprojects {
       "ComparableType",
       "ConstantOverflow",
       "DoubleBraceInitialization",
-      "EqualsUnsafeCast",
       "EmptyBlockTag",
+      "EqualsUnsafeCast",
       "FutureReturnValueIgnored",
       "InconsistentCapitalization",
       "InconsistentHashCode",
       "JavaTimeDefaultTimeZone",
       "JdkObsolete",
       "LockNotBeforeTry",
-      "MissingSummary",
       "MissingOverride",
+      "MissingSummary",
       "MutableConstantField",
       "NonOverridingEquals",
       "ObjectEqualsForPrimitives",
@@ -360,7 +359,12 @@ subprojects {
     options.locale = "en_US"
 
     val projectName = project.name
-    if (projectName == "common" || projectName == "api" || projectName == "client-java" || projectName == "client-cli" || projectName == "filesystem-hadoop3") {
+    if (projectName == "common" ||
+      projectName == "api" ||
+      projectName == "client-java" ||
+      projectName == "client-cli" ||
+      projectName == "filesystem-hadoop3"
+    ) {
       options {
         (this as CoreJavadocOptions).addStringOption("Xwerror", "-quiet")
         isFailOnError = true
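`addStringOption("Xwerror", "-quiet")` passes `-Xwerror` to the javadoc tool, so any javadoc warning in these five modules fails the build. The reformatted multi-line `if` is equivalent to a set membership test; an alternative sketch (a style option, not what the commit does):

```kotlin
// Alternative to the chained == comparisons: a set lookup. Illustrative only.
val strictJavadocProjects = setOf(
  "common", "api", "client-java", "client-cli", "filesystem-hadoop3"
)
tasks.withType<Javadoc>().configureEach {
  if (project.name in strictJavadocProjects) {
    (options as CoreJavadocOptions).addStringOption("Xwerror", "-quiet")
    isFailOnError = true
  }
}
```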
@@ -592,8 +596,80 @@ tasks {
   val projectDir = layout.projectDirectory
   val outputDir = projectDir.dir("distribution")
 
+  val copySubprojectDependencies by registering(Copy::class) {
+    subprojects.forEach() {
+      if (
+        !it.name.startsWith("authorization") &&
+        !it.name.startsWith("catalog") &&
+        !it.name.startsWith("cli") &&
+        !it.name.startsWith("client") &&
+        !it.name.startsWith("filesystem") &&
+        !it.name.startsWith("flink") &&
+        !it.name.startsWith("iceberg") &&
+        !it.name.startsWith("spark") &&
+        it.name != "bundled-catalog" &&
+        it.name != "hadoop-common" &&
+        it.name != "hive-metastore-common" &&
+        it.name != "integration-test" &&
+        it.name != "trino-connector" &&
+        it.parent?.name != "bundles"
+      ) {
+        from(it.configurations.runtimeClasspath)
+        into("distribution/package/libs")
+      }
+    }
+  }
+
+  val copySubprojectLib by registering(Copy::class) {
+    subprojects.forEach() {
+      if (
+        !it.name.startsWith("authorization") &&
+        !it.name.startsWith("catalog") &&
+        !it.name.startsWith("cli") &&
+        !it.name.startsWith("client") &&
+        !it.name.startsWith("filesystem") &&
+        !it.name.startsWith("flink") &&
+        !it.name.startsWith("iceberg") &&
+        !it.name.startsWith("integration-test") &&
+        !it.name.startsWith("spark") &&
+        !it.name.startsWith("trino-connector") &&
+        it.name != "docs" &&
+        it.name != "hadoop-common" &&
+        it.name != "hive-metastore-common" &&
+        it.parent?.name != "bundles"
+      ) {
+        dependsOn("${it.name}:build")
+        from("${it.name}/build/libs")
+        into("distribution/package/libs")
+        include("*.jar")
+        setDuplicatesStrategy(DuplicatesStrategy.INCLUDE)
+      }
+    }
+  }
+
+  val copyCatalogLibAndConfigs by registering(Copy::class) {
+    dependsOn(
+      ":catalogs:catalog-hadoop:copyLibAndConfig",
+      ":catalogs:catalog-hive:copyLibAndConfig",
+      ":catalogs:catalog-jdbc-doris:copyLibAndConfig",
+      ":catalogs:catalog-jdbc-mysql:copyLibAndConfig",
+      ":catalogs:catalog-jdbc-oceanbase:copyLibAndConfig",
+      ":catalogs:catalog-jdbc-postgresql:copyLibAndConfig",
+      ":catalogs:catalog-lakehouse-iceberg:copyLibAndConfig",
+      ":catalogs:catalog-lakehouse-paimon:copyLibAndConfig",
+      ":catalogs:catalog-lakehouse-hudi:copyLibAndConfig",
+      ":catalogs:catalog-kafka:copyLibAndConfig",
+      ":catalogs:catalog-model:copyLibAndConfig"
+    )
+  }
+
   val compileDistribution by registering {
-    dependsOn(":web:web:build", "copySubprojectDependencies", "copyCatalogLibAndConfigs", ":authorizations:copyLibAndConfig", "copySubprojectLib", "iceberg:iceberg-rest-server:copyLibAndConfigs")
+    dependsOn(":web:web:build",
+      copySubprojectDependencies,
+      copyCatalogLibAndConfigs,
+      ":authorizations:copyLibAndConfig",
+      copySubprojectLib,
+      "iceberg:iceberg-rest-server:copyLibAndConfigs")
 
     group = "gravitino distribution"
     outputs.dir(projectDir.dir("distribution/package"))
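The hunk above replaces `register("name", Copy::class)` calls (the deleted block appears near the end of this diff) with `val name by registering(Copy::class)`, so `compileDistribution` can depend on `TaskProvider` values instead of bare strings. Both forms register lazily, but provider references are checked by the Kotlin compiler while string references only fail when the task graph is built. A minimal sketch with hypothetical task names:

```kotlin
// String wiring: a misspelled name only fails at task-graph time.
tasks.register("packAll") {
  dependsOn("packDocs")
}

// Provider wiring: a misspelled reference is a compile error in the script.
val packDocs by tasks.registering {
  doLast { println("packing docs") }
}
tasks.register("packEverything") {
  dependsOn(packDocs)
}
```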
@@ -641,11 +717,18 @@ tasks {
     doLast {
       copy {
         from(projectDir.dir("conf")) {
-          include("${rootProject.name}-iceberg-rest-server.conf.template", "${rootProject.name}-env.sh.template", "log4j2.properties.template")
+          include(
+            "${rootProject.name}-iceberg-rest-server.conf.template",
+            "${rootProject.name}-env.sh.template",
+            "log4j2.properties.template"
+          )
           into("${rootProject.name}-iceberg-rest-server/conf")
         }
         from(projectDir.dir("bin")) {
-          include("common.sh", "${rootProject.name}-iceberg-rest-server.sh")
+          include(
+            "common.sh",
+            "${rootProject.name}-iceberg-rest-server.sh"
+          )
           into("${rootProject.name}-iceberg-rest-server/bin")
         }
         into(outputDir)
@@ -695,42 +778,37 @@ tasks {
     }
   }
 
-  val assembleDistribution by registering(Tar::class) {
-    dependsOn("assembleTrinoConnector", "assembleIcebergRESTServer")
+  val checksumTrinoConnector by registering {
     group = "gravitino distribution"
-    finalizedBy("checksumDistribution")
-    into("${rootProject.name}-$version-bin")
-    from(compileDistribution.map { it.outputs.files.single() })
-    compression = Compression.GZIP
-    archiveFileName.set("${rootProject.name}-$version-bin.tar.gz")
-    destinationDirectory.set(projectDir.dir("distribution"))
+    // dependsOn(assembleTrinoConnector)
+    val archiveFile = assembleTrinoConnector.flatMap { it.archiveFile }
+    val checksumFile = archiveFile.map { archive ->
+      archive.asFile.let { it.resolveSibling("${it.name}.sha256") }
+    }
+    inputs.file(archiveFile)
+    outputs.file(checksumFile)
+    doLast {
+      checksumFile.get().writeText(
+        serviceOf<ChecksumService>().sha256(archiveFile.get().asFile).toString()
+      )
+    }
   }
 
+
   val assembleTrinoConnector by registering(Tar::class) {
-    dependsOn("compileTrinoConnector")
+    dependsOn(compileTrinoConnector)
     group = "gravitino distribution"
-    finalizedBy("checksumTrinoConnector")
+    finalizedBy(checksumTrinoConnector)
     into("${rootProject.name}-trino-connector-$version")
     from(compileTrinoConnector.map { it.outputs.files.single() })
     compression = Compression.GZIP
     archiveFileName.set("${rootProject.name}-trino-connector-$version.tar.gz")
     destinationDirectory.set(projectDir.dir("distribution"))
   }
 
-  val assembleIcebergRESTServer by registering(Tar::class) {
-    dependsOn("compileIcebergRESTServer")
+  val checksumIcebergRESTServerDistribution by registering {
     group = "gravitino distribution"
-    finalizedBy("checksumIcebergRESTServerDistribution")
-    into("${rootProject.name}-iceberg-rest-server-$version-bin")
-    from(compileIcebergRESTServer.map { it.outputs.files.single() })
-    compression = Compression.GZIP
-    archiveFileName.set("${rootProject.name}-iceberg-rest-server-$version-bin.tar.gz")
-    destinationDirectory.set(projectDir.dir("distribution"))
-  }
-
-  register("checksumIcebergRESTServerDistribution") {
-    group = "gravitino distribution"
-    dependsOn(assembleIcebergRESTServer)
+    // dependsOn(assembleIcebergRESTServer)
     val archiveFile = assembleIcebergRESTServer.flatMap { it.archiveFile }
     val checksumFile = archiveFile.map { archive ->
       archive.asFile.let { it.resolveSibling("${it.name}.sha256") }
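Both checksum tasks declare the archive via `inputs.file` and the `.sha256` sibling via `outputs.file`, so they are incremental and only re-hash when the archive changes. Note that `serviceOf<ChecksumService>()` is internal Gradle API and can move between versions; a public-API alternative using `java.security.MessageDigest`, sketched with a hypothetical helper name:

```kotlin
import java.security.MessageDigest

// Hypothetical helper: stream a file through SHA-256 and hex-encode the
// digest, equivalent to serviceOf<ChecksumService>().sha256(file).toString().
fun sha256Hex(file: java.io.File): String {
  val digest = MessageDigest.getInstance("SHA-256")
  file.inputStream().use { input ->
    val buffer = ByteArray(8192)
    while (true) {
      val read = input.read(buffer)
      if (read < 0) break
      digest.update(buffer, 0, read)
    }
  }
  return digest.digest().joinToString("") { "%02x".format(it) }
}
```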
@@ -744,9 +822,20 @@ tasks {
     }
   }
 
-  register("checksumDistribution") {
+  val assembleIcebergRESTServer by registering(Tar::class) {
+    dependsOn(compileIcebergRESTServer)
+    group = "gravitino distribution"
+    finalizedBy(checksumIcebergRESTServerDistribution)
+    into("${rootProject.name}-iceberg-rest-server-$version-bin")
+    from(compileIcebergRESTServer.map { it.outputs.files.single() })
+    compression = Compression.GZIP
+    archiveFileName.set("${rootProject.name}-iceberg-rest-server-$version-bin.tar.gz")
+    destinationDirectory.set(projectDir.dir("distribution"))
+  }
+
+  val checksumDistribution by registering {
     group = "gravitino distribution"
-    dependsOn(assembleDistribution, "checksumTrinoConnector", "checksumIcebergRESTServerDistribution")
+    dependsOn(checksumTrinoConnector, checksumIcebergRESTServerDistribution)
     val archiveFile = assembleDistribution.flatMap { it.archiveFile }
     val checksumFile = archiveFile.map { archive ->
       archive.asFile.let { it.resolveSibling("${it.name}.sha256") }
@@ -760,83 +849,22 @@ tasks {
     }
   }
 
-  register("checksumTrinoConnector") {
+  val assembleDistribution by registering(Tar::class) {
+    dependsOn(assembleTrinoConnector, assembleIcebergRESTServer)
     group = "gravitino distribution"
-    dependsOn(assembleTrinoConnector)
-    val archiveFile = assembleTrinoConnector.flatMap { it.archiveFile }
-    val checksumFile = archiveFile.map { archive ->
-      archive.asFile.let { it.resolveSibling("${it.name}.sha256") }
-    }
-    inputs.file(archiveFile)
-    outputs.file(checksumFile)
-    doLast {
-      checksumFile.get().writeText(
-        serviceOf<ChecksumService>().sha256(archiveFile.get().asFile).toString()
-      )
-    }
+    finalizedBy(checksumDistribution)
+    into("${rootProject.name}-$version-bin")
+    from(compileDistribution.map { it.outputs.files.single() })
+    compression = Compression.GZIP
+    archiveFileName.set("${rootProject.name}-$version-bin.tar.gz")
+    destinationDirectory.set(projectDir.dir("distribution"))
   }
 
   val cleanDistribution by registering(Delete::class) {
     group = "gravitino distribution"
     delete(outputDir)
   }
 
-  register("copySubprojectDependencies", Copy::class) {
-    subprojects.forEach() {
-      if (!it.name.startsWith("catalog") &&
-        !it.name.startsWith("authorization") &&
-        !it.name.startsWith("cli") &&
-        !it.name.startsWith("client") && !it.name.startsWith("filesystem") && !it.name.startsWith("spark") && !it.name.startsWith("iceberg") && it.name != "trino-connector" &&
-        it.name != "integration-test" && it.name != "bundled-catalog" && !it.name.startsWith("flink") &&
-        it.name != "integration-test" && it.name != "hive-metastore-common" && !it.name.startsWith("flink") &&
-        it.parent?.name != "bundles" && it.name != "hadoop-common"
-      ) {
-        from(it.configurations.runtimeClasspath)
-        into("distribution/package/libs")
-      }
-    }
-  }
-
-  register("copySubprojectLib", Copy::class) {
-    subprojects.forEach() {
-      if (!it.name.startsWith("catalog") &&
-        !it.name.startsWith("client") &&
-        !it.name.startsWith("cli") &&
-        !it.name.startsWith("authorization") &&
-        !it.name.startsWith("filesystem") &&
-        !it.name.startsWith("spark") &&
-        !it.name.startsWith("iceberg") &&
-        !it.name.startsWith("integration-test") &&
-        !it.name.startsWith("flink") &&
-        !it.name.startsWith("trino-connector") &&
-        it.name != "hive-metastore-common" &&
-        it.name != "docs" && it.name != "hadoop-common" && it.parent?.name != "bundles"
-      ) {
-        dependsOn("${it.name}:build")
-        from("${it.name}/build/libs")
-        into("distribution/package/libs")
-        include("*.jar")
-        setDuplicatesStrategy(DuplicatesStrategy.INCLUDE)
-      }
-    }
-  }
-
-  register("copyCatalogLibAndConfigs", Copy::class) {
-    dependsOn(
-      ":catalogs:catalog-hive:copyLibAndConfig",
-      ":catalogs:catalog-lakehouse-iceberg:copyLibAndConfig",
-      ":catalogs:catalog-lakehouse-paimon:copyLibAndConfig",
-      "catalogs:catalog-lakehouse-hudi:copyLibAndConfig",
-      ":catalogs:catalog-jdbc-doris:copyLibAndConfig",
-      ":catalogs:catalog-jdbc-mysql:copyLibAndConfig",
-      ":catalogs:catalog-jdbc-oceanbase:copyLibAndConfig",
-      ":catalogs:catalog-jdbc-postgresql:copyLibAndConfig",
-      ":catalogs:catalog-hadoop:copyLibAndConfig",
-      ":catalogs:catalog-kafka:copyLibAndConfig",
-      ":catalogs:catalog-model:copyLibAndConfig"
-    )
-  }
-
   clean {
     dependsOn(cleanDistribution)
   }
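Across the last four hunks the wiring direction flips: each assemble task now calls `finalizedBy` with its checksum task's provider, and the checksum tasks' explicit `dependsOn(...)` lines are commented out. The dependency survives because a Provider obtained via `assembleX.flatMap { it.archiveFile }` carries its producing task, so `inputs.file(archiveFile)` hands Gradle the dependency implicitly. A minimal sketch of both mechanisms, with hypothetical task names:

```kotlin
// Archive task; the checksum task below gets its dependency implicitly.
val makeArchive by tasks.registering(Zip::class) {
  from("src")
  destinationDirectory.set(layout.buildDirectory.dir("dist"))
  archiveFileName.set("example.zip")
}

val makeChecksum by tasks.registering {
  // flatMap keeps the producer attached, so requesting makeChecksum
  // also builds makeArchive without an explicit dependsOn.
  val archive = makeArchive.flatMap { it.archiveFile }
  inputs.file(archive)
  doLast { println("hashing ${archive.get().asFile.name}") }
}

// finalizedBy: makeChecksum runs after makeArchive whenever makeArchive runs.
makeArchive.configure { finalizedBy(makeChecksum) }
```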