<?xml version="1.0" encoding="UTF-8"?>
<!--
  Copyright (c) 2020-2025, NVIDIA CORPORATION.

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.nvidia</groupId>
    <artifactId>rapids-4-spark-parent_2.12</artifactId>
    <name>RAPIDS Accelerator for Apache Spark Root Project</name>
    <description>The root project of the RAPIDS Accelerator for Apache Spark</description>
    <version>25.02.0</version>
    <packaging>pom</packaging>

    <url>https://nvidia.github.io/spark-rapids/</url>
    <licenses>
        <license>
            <name>Apache License, Version 2.0</name>
            <url>https://www.apache.org/licenses/LICENSE-2.0.txt</url>
            <distribution>repo</distribution>
        </license>
    </licenses>
    <scm>
        <connection>scm:git:https://github.com/NVIDIA/spark-rapids.git</connection>
        <developerConnection>scm:git:git@github.com:NVIDIA/spark-rapids.git</developerConnection>
        <tag>HEAD</tag>
        <url>https://github.com/NVIDIA/spark-rapids</url>
    </scm>
    <developers>
        <developer>
            <id>revans2</id>
            <name>Robert Evans</name>
            <email>roberte@nvidia.com</email>
            <roles>
                <role>Committer</role>
            </roles>
            <timezone>-6</timezone>
        </developer>
        <developer>
            <id>tgravescs</id>
            <name>Thomas Graves</name>
            <email>tgraves@nvidia.com</email>
            <roles>
                <role>Committer</role>
            </roles>
            <timezone>-6</timezone>
        </developer>
    </developers>
    <!-- modules shared among profiles -->
    <modules>
        <module>aggregator</module>
        <module>datagen</module>
        <module>dist</module>
        <module>integration_tests</module>
        <module>shuffle-plugin</module>
        <module>sql-plugin</module>
        <module>sql-plugin-api</module>
        <module>tests</module>
        <module>tools</module>
        <module>udf-compiler</module>

        <!--
            Workaround to enforce the build order
            ROOT -> jdk-profiles -> shim-deps -> ...
        -->
        <module>shim-deps</module>
        <module>jdk-profiles</module>
    </modules>
    <profiles>
        <!-- #if scala-2.12 -->
        <profile>
            <id>release320</id>
            <activation>
                <activeByDefault>true</activeByDefault>
                <property>
                    <name>buildver</name>
                    <value>320</value>
                </property>
            </activation>
            <properties>
                <buildver>320</buildver>
                <spark.version>${spark320.version}</spark.version>
                <spark.test.version>${spark320.version}</spark.test.version>
                <parquet.hadoop.version>1.12.1</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-20x</rapids.delta.artifactId1>
            </properties>
            <modules>
                <module>delta-lake/delta-20x</module>
            </modules>
        </profile>
        <!-- #endif scala-2.12 -->
        <!-- #if scala-2.12 -->
        <profile>
            <id>release321</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>321</value>
                </property>
            </activation>
            <properties>
                <buildver>321</buildver>
                <spark.version>${spark321.version}</spark.version>
                <spark.test.version>${spark321.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-20x</rapids.delta.artifactId1>
            </properties>
            <modules>
                <module>delta-lake/delta-20x</module>
            </modules>
        </profile>
        <!-- #endif scala-2.12 -->
        <!-- #if scala-2.12 -->
        <profile>
            <id>release321cdh</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>321cdh</value>
                </property>
            </activation>
            <properties>
                <buildver>321cdh</buildver>
                <spark.version>${spark321cdh.version}</spark.version>
                <spark.test.version>${spark321cdh.version}</spark.test.version>
                <parquet.hadoop.version>1.10.1</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-20x</rapids.delta.artifactId1>
                <cloudera.repo.enabled>true</cloudera.repo.enabled>
                <!-- #endif scala-2.12 -->
                <!-- Keep the scala plugin version at 4.3.0; for details
                see https://github.com/NVIDIA/spark-rapids/issues/11112 -->
                <!-- #if scala-2.12 -->
                <scala.plugin.version>4.3.0</scala.plugin.version>
            </properties>
            <modules>
                <module>shim-deps/cloudera</module>
                <module>delta-lake/delta-20x</module>
            </modules>
        </profile>
        <!-- #endif scala-2.12 -->
        <!-- #if scala-2.12 -->
        <profile>
            <id>release322</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>322</value>
                </property>
            </activation>
            <properties>
                <buildver>322</buildver>
                <spark.version>${spark322.version}</spark.version>
                <spark.test.version>${spark322.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-20x</rapids.delta.artifactId1>
            </properties>
            <modules>
                <module>delta-lake/delta-20x</module>
            </modules>
        </profile>
        <!-- #endif scala-2.12 -->
        <!-- #if scala-2.12 -->
        <profile>
            <id>release323</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>323</value>
                </property>
            </activation>
            <properties>
                <buildver>323</buildver>
                <spark.version>${spark323.version}</spark.version>
                <spark.test.version>${spark323.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-20x</rapids.delta.artifactId1>
            </properties>
            <modules>
                <module>delta-lake/delta-20x</module>
            </modules>
        </profile>
        <!-- #endif scala-2.12 -->
        <!-- #if scala-2.12 -->
        <profile>
            <id>release324</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>324</value>
                </property>
            </activation>
            <properties>
                <buildver>324</buildver>
                <spark.version>${spark324.version}</spark.version>
                <spark.test.version>${spark324.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-20x</rapids.delta.artifactId1>
            </properties>
            <modules>
                <module>delta-lake/delta-20x</module>
            </modules>
        </profile>
        <!-- #endif scala-2.12 -->
        <profile>
            <id>release330</id>
            <activation>
                <!-- #if scala-2.13 --><!--
                <activeByDefault>true</activeByDefault>
                --><!-- #endif scala-2.13 -->
                <property>
                    <name>buildver</name>
                    <value>330</value>
                </property>
            </activation>
            <properties>
                <buildver>330</buildver>
                <spark.version>${spark330.version}</spark.version>
                <spark.test.version>${spark330.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
                <rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
                <rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
            </properties>
            <modules>
                <module>delta-lake/delta-21x</module>
                <module>delta-lake/delta-22x</module>
                <module>delta-lake/delta-23x</module>
            </modules>
        </profile>
        <profile>
            <id>release331</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>331</value>
                </property>
            </activation>
            <properties>
                <buildver>331</buildver>
                <spark.version>${spark331.version}</spark.version>
                <spark.test.version>${spark331.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
                <rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
                <rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
            </properties>
            <modules>
                <module>delta-lake/delta-21x</module>
                <module>delta-lake/delta-22x</module>
                <module>delta-lake/delta-23x</module>
            </modules>
        </profile>
        <profile>
            <id>release332</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>332</value>
                </property>
            </activation>
            <properties>
                <buildver>332</buildver>
                <spark.version>${spark332.version}</spark.version>
                <spark.test.version>${spark332.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
                <rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
                <rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
            </properties>
            <modules>
                <module>delta-lake/delta-21x</module>
                <module>delta-lake/delta-22x</module>
                <module>delta-lake/delta-23x</module>
            </modules>
        </profile>
        <profile>
            <id>release333</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>333</value>
                </property>
            </activation>
            <properties>
                <buildver>333</buildver>
                <spark.version>${spark333.version}</spark.version>
                <spark.test.version>${spark333.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
                <rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
                <rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
            </properties>
            <modules>
                <module>delta-lake/delta-21x</module>
                <module>delta-lake/delta-22x</module>
                <module>delta-lake/delta-23x</module>
            </modules>
        </profile>
        <profile>
            <id>release334</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>334</value>
                </property>
            </activation>
            <properties>
                <buildver>334</buildver>
                <spark.version>${spark334.version}</spark.version>
                <spark.test.version>${spark334.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
                <rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
                <rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
            </properties>
            <modules>
                <module>delta-lake/delta-21x</module>
                <module>delta-lake/delta-22x</module>
                <module>delta-lake/delta-23x</module>
            </modules>
        </profile>
        <profile>
            <id>release340</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>340</value>
                </property>
            </activation>
            <properties>
                <buildver>340</buildver>
                <spark.version>${spark340.version}</spark.version>
                <spark.test.version>${spark340.version}</spark.test.version>
                <parquet.hadoop.version>1.12.3</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-24x</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <slf4j.version>2.0.6</slf4j.version>
            </properties>
            <modules>
                <module>delta-lake/delta-24x</module>
            </modules>
        </profile>
        <profile>
            <id>release341</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>341</value>
                </property>
            </activation>
            <properties>
                <buildver>341</buildver>
                <spark.version>${spark341.version}</spark.version>
                <spark.test.version>${spark341.version}</spark.test.version>
                <parquet.hadoop.version>1.12.3</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-24x</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <slf4j.version>2.0.6</slf4j.version>
            </properties>
            <modules>
                <module>delta-lake/delta-24x</module>
            </modules>
        </profile>
        <profile>
            <id>release342</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>342</value>
                </property>
            </activation>
            <properties>
                <buildver>342</buildver>
                <spark.version>${spark342.version}</spark.version>
                <spark.test.version>${spark342.version}</spark.test.version>
                <parquet.hadoop.version>1.12.3</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-24x</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <slf4j.version>2.0.6</slf4j.version>
            </properties>
            <modules>
                <module>delta-lake/delta-24x</module>
            </modules>
        </profile>
        <profile>
            <id>release343</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>343</value>
                </property>
            </activation>
            <properties>
                <buildver>343</buildver>
                <spark.version>${spark343.version}</spark.version>
                <spark.test.version>${spark343.version}</spark.test.version>
                <parquet.hadoop.version>1.12.3</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-24x</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <slf4j.version>2.0.6</slf4j.version>
            </properties>
            <modules>
                <module>delta-lake/delta-24x</module>
            </modules>
        </profile>
        <profile>
            <id>release344</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>344</value>
                </property>
            </activation>
            <properties>
                <buildver>344</buildver>
                <spark.version>${spark344.version}</spark.version>
                <spark.test.version>${spark344.version}</spark.test.version>
                <parquet.hadoop.version>1.12.3</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-24x</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <slf4j.version>2.0.6</slf4j.version>
            </properties>
            <modules>
                <module>delta-lake/delta-24x</module>
            </modules>
        </profile>
        <profile>
            <id>release330cdh</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>330cdh</value>
                </property>
            </activation>
            <properties>
                <buildver>330cdh</buildver>
                <spark.version>${spark330cdh.version}</spark.version>
                <spark.test.version>${spark330cdh.version}</spark.test.version>
                <parquet.hadoop.version>1.10.99.7.1.8.0-801</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
                <rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
                <rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <cloudera.repo.enabled>true</cloudera.repo.enabled>
                <!-- Keep the scala plugin version at 4.3.0; for details
                see https://github.com/NVIDIA/spark-rapids/issues/11112 -->
                <scala.plugin.version>4.3.0</scala.plugin.version>
            </properties>
            <modules>
                <module>shim-deps/cloudera</module>
                <module>delta-lake/delta-21x</module>
                <module>delta-lake/delta-22x</module>
                <module>delta-lake/delta-23x</module>
            </modules>
        </profile>
        <profile>
            <id>release332cdh</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>332cdh</value>
                </property>
            </activation>
            <properties>
                <buildver>332cdh</buildver>
                <spark.version>${spark332cdh.version}</spark.version>
                <spark.test.version>${spark332cdh.version}</spark.test.version>
                <parquet.hadoop.version>1.10.99.7.1.9.0-387</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
                <rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
                <rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <cloudera.repo.enabled>true</cloudera.repo.enabled>
                <!-- Keep the scala plugin version at 4.3.0; for details
                see https://github.com/NVIDIA/spark-rapids/issues/11112 -->
                <scala.plugin.version>4.3.0</scala.plugin.version>
            </properties>
            <modules>
                <module>shim-deps/cloudera</module>
                <module>delta-lake/delta-21x</module>
                <module>delta-lake/delta-22x</module>
                <module>delta-lake/delta-23x</module>
            </modules>
        </profile>
        <profile>
            <!-- Note Databricks requires 2 properties -Ddatabricks and -Dbuildver=330db -->
            <!-- Note that 330db backports many features from Spark 3.4.0 -->
            <id>release330db</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>330db</value>
                </property>
            </activation>
            <properties>
                <spark.version.classifier>spark330db</spark.version.classifier>
                <spark.version>${spark330db.version}</spark.version>
                <spark.test.version>${spark330db.version}</spark.test.version>
                <hadoop.client.version>3.3.1</hadoop.client.version>
                <rat.consoleOutput>true</rat.consoleOutput>
                <parquet.hadoop.version>1.12.0</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-${spark.version.classifier}</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
            </properties>
            <modules>
                <module>shim-deps/databricks</module>
                <module>delta-lake/delta-spark330db</module>
            </modules>
        </profile>
        <profile>
            <!-- Note Databricks requires 2 properties -Ddatabricks and -Dbuildver=332db -->
            <!-- Note that 332db backports many features from Spark 3.4.0 -->
            <id>release332db</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>332db</value>
                </property>
            </activation>
            <properties>
                <spark.version.classifier>spark332db</spark.version.classifier>
                <spark.version>${spark332db.version}</spark.version>
                <spark.test.version>${spark332db.version}</spark.test.version>
                <hadoop.client.version>3.3.1</hadoop.client.version>
                <rat.consoleOutput>true</rat.consoleOutput>
                <parquet.hadoop.version>1.12.0</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-${spark.version.classifier}</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
            </properties>
            <modules>
                <module>shim-deps/databricks</module>
                <module>delta-lake/delta-spark332db</module>
            </modules>
        </profile>
        <profile>
            <!-- Note Databricks requires 2 properties -Ddatabricks and -Dbuildver=341db -->
            <id>release341db</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>341db</value>
                </property>
            </activation>
            <properties>
                <spark.version.classifier>spark341db</spark.version.classifier>
                <spark.version>${spark341db.version}</spark.version>
                <spark.test.version>${spark341db.version}</spark.test.version>
                <hadoop.client.version>3.3.1</hadoop.client.version>
                <rat.consoleOutput>true</rat.consoleOutput>
                <parquet.hadoop.version>1.12.0</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-${spark.version.classifier}</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
            </properties>
            <modules>
                <module>shim-deps/databricks</module>
                <module>delta-lake/delta-spark341db</module>
            </modules>
        </profile>
        <profile>
            <id>release350</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>350</value>
                </property>
            </activation>
            <properties>
                <buildver>350</buildver>
                <spark.version>${spark350.version}</spark.version>
                <spark.test.version>${spark350.version}</spark.test.version>
                <parquet.hadoop.version>1.13.1</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-stub</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <slf4j.version>2.0.7</slf4j.version>
            </properties>
            <modules>
                <module>delta-lake/delta-stub</module>
            </modules>
        </profile>
        <profile>
            <!-- Note Databricks requires 2 properties -Ddatabricks and -Dbuildver=350db143 -->
            <id>release350db143</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>350db143</value>
                </property>
            </activation>
            <properties>
                <!-- Downgrade scala plugin version due to: https://github.com/sbt/sbt/issues/4305 -->
                <scala.plugin.version>3.4.4</scala.plugin.version>
                <spark.version.classifier>spark350db143</spark.version.classifier>
                <spark.version>${spark350db143.version}</spark.version>
                <spark.test.version>${spark350db143.version}</spark.test.version>
                <hadoop.client.version>3.3.1</hadoop.client.version>
                <rat.consoleOutput>true</rat.consoleOutput>
                <parquet.hadoop.version>1.12.0</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-${spark.version.classifier}</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
            </properties>
            <modules>
                <module>shim-deps/databricks</module>
                <module>delta-lake/delta-spark350db143</module>
            </modules>
        </profile>
        <profile>
            <id>release351</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>351</value>
                </property>
            </activation>
            <properties>
                <buildver>351</buildver>
                <spark.version>${spark351.version}</spark.version>
                <spark.test.version>${spark351.version}</spark.test.version>
                <parquet.hadoop.version>1.13.1</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-stub</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <slf4j.version>2.0.7</slf4j.version>
            </properties>
            <modules>
                <module>delta-lake/delta-stub</module>
            </modules>
        </profile>
        <profile>
            <id>release352</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>352</value>
                </property>
            </activation>
            <properties>
                <buildver>352</buildver>
                <spark.version>${spark352.version}</spark.version>
                <spark.test.version>${spark352.version}</spark.test.version>
                <parquet.hadoop.version>1.13.1</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-stub</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <slf4j.version>2.0.7</slf4j.version>
            </properties>
            <modules>
                <module>delta-lake/delta-stub</module>
            </modules>
        </profile>
        <profile>
            <id>release353</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>353</value>
                </property>
            </activation>
            <properties>
                <buildver>353</buildver>
                <spark.version>${spark353.version}</spark.version>
                <spark.test.version>${spark353.version}</spark.test.version>
                <parquet.hadoop.version>1.13.1</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-stub</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <slf4j.version>2.0.7</slf4j.version>
            </properties>
            <modules>
                <module>delta-lake/delta-stub</module>
            </modules>
        </profile>
        <profile>
            <id>release354</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>354</value>
                </property>
            </activation>
            <properties>
                <buildver>354</buildver>
                <spark.version>${spark354.version}</spark.version>
                <spark.test.version>${spark354.version}</spark.test.version>
                <parquet.hadoop.version>1.13.1</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-stub</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <slf4j.version>2.0.7</slf4j.version>
            </properties>
            <modules>
                <module>delta-lake/delta-stub</module>
            </modules>
        </profile>
        <!-- #if scala-2.13 --><!--
        <profile>
            <id>release400</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>400</value>
                </property>
            </activation>
            <properties>
                <buildver>400</buildver>
                <spark.version>${spark400.version}</spark.version>
                <spark.test.version>${spark400.version}</spark.test.version>
                <parquet.hadoop.version>1.13.1</parquet.hadoop.version>
                <rapids.delta.artifactId1>rapids-4-spark-delta-stub</rapids.delta.artifactId1>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <slf4j.version>2.0.7</slf4j.version>
            </properties>
            <modules>
                <module>delta-lake/delta-stub</module>
            </modules>
        </profile>
        --><!-- #endif scala-2.13 -->
        <profile>
            <id>source-javadoc</id>
            <build>
                <plugins>
                    <plugin>
                        <groupId>org.apache.maven.plugins</groupId>
                        <artifactId>maven-source-plugin</artifactId>
                        <executions>
                            <execution>
                                <id>attach-source</id>
                                <phase>${rapids.source.jar.phase}</phase>
                                <goals>
                                    <goal>jar</goal>
                                </goals>
                            </execution>
                        </executions>
                    </plugin>
                </plugins>
            </build>
        </profile>
        <profile>
            <id>scala-2.12</id>
            <properties>
                <scala.binary.version>2.12</scala.binary.version>
                <scala.version>2.12.15</scala.version>
            </properties>
        </profile>
        <profile>
            <id>scala-2.13</id>
            <properties>
                <scala.binary.version>2.13</scala.binary.version>
                <scala.version>2.13.14</scala.version>
            </properties>
        </profile>
        <profile>
            <id>arm64</id>
            <properties>
                <jni.classifier>${cuda.version}-arm64</jni.classifier>
                <ucx.version>${ucx.baseVersion}-aarch64</ucx.version>
            </properties>
        </profile>
        <profile>
            <id>bloopInstall</id>
            <activation>
                <property>
                    <name>bloopInstall</name>
                    <value>true</value>
                </property>
            </activation>
            <properties>
                <skipTests>true</skipTests>
                <maven.scaladoc.skip>true</maven.scaladoc.skip>
                <maven.scalastyle.skip>true</maven.scalastyle.skip>
            </properties>
            <build>
                <plugins>
                    <plugin>
                        <groupId>org.apache.maven.plugins</groupId>
                        <artifactId>maven-enforcer-plugin</artifactId>
                        <executions>
                            <execution>
                                <id>enforce-bloop-rules</id>
                                <goals><goal>enforce</goal></goals>
                                <configuration>
                                    <rules>
                                        <requireJavaVersion>
                                            <message>Metals semantic database requires JAVA_HOME pointing to JDK 11+, actual Java version: ${java.version}</message>
                                            <version>[11,)</version>
                                        </requireJavaVersion>
                                    </rules>
                                </configuration>
                            </execution>
                        </executions>
                    </plugin>
                    <plugin>
                        <groupId>ch.epfl.scala</groupId>
                        <artifactId>bloop-maven-plugin</artifactId>
                        <executions>
                            <execution>
                                <id>generate-bloop-projects</id>
                                <goals><goal>bloopInstall</goal></goals>
                                <phase>${bloop.installPhase}</phase>
                                <configuration>
                                    <!-- Metals looks at the repo root, so to accommodate Scala 2.13 we define the Bloop config directory explicitly -->
                                    <bloopConfigDir>${bloop.configDirectory}</bloopConfigDir>
                                </configuration>
                            </execution>
                        </executions>
                    </plugin>
                </plugins>
            </build>
        </profile>
    </profiles>

    <properties>
        <!-- start dyn.shim properties -->
        <dyn.shim.excluded.releases></dyn.shim.excluded.releases>
        <!-- end dyn.shim properties -->

        <rapids.module>.</rapids.module>
        <rapids.secondaryCacheDir>${spark.rapids.project.basedir}/target/${spark.version.classifier}/.sbt/1.0/zinc/org.scala-sbt</rapids.secondaryCacheDir>
        <allowConventionalDistJar>false</allowConventionalDistJar>
        <buildver>320</buildver>
        <maven.compiler.source>1.8</maven.compiler.source>
        <java.major.version>8</java.major.version>
        <spark.version>${spark320.version}</spark.version>
        <spark.test.version>${spark.version}</spark.test.version>
        <parquet.hadoop.version>1.10.1</parquet.hadoop.version>
        <spark.version.classifier>spark${buildver}</spark.version.classifier>
        <cuda.version>cuda11</cuda.version>
        <jni.classifier>${cuda.version}</jni.classifier>
        <spark-rapids-jni.version>25.02.0</spark-rapids-jni.version>
        <spark-rapids-private.version>25.02.0</spark-rapids-private.version>
        <spark-rapids-hybrid.version>25.02.0</spark-rapids-hybrid.version>
        <scala.binary.version>2.12</scala.binary.version>
        <scala.recompileMode>incremental</scala.recompileMode>
        <scala.version>2.12.15</scala.version>
        <!--
        -processing
        to suppress unactionable "No processor claimed any of these annotations"
        from various dependencies. Example @UDFType
        https://github.com/openjdk/jdk17/blob/4afbcaf55383ec2f5da53282a1547bac3d099e9d/src/jdk.compiler/share/classes/com/sun/tools/javac/resources/compiler.properties#L1993-L1994
        -->
        <scala.javac.args>-Xlint:all,-serial,-path,-try,-processing|-Werror</scala.javac.args>
        <ucx.baseVersion>1.16.0</ucx.baseVersion>
        <roaringbitmap.version>1.0.6</roaringbitmap.version>
        <!-- ucx x86 is just the base version (implied), arm is specified under arm64 profile. -->
        <ucx.version>${ucx.baseVersion}</ucx.version>
        <rapids.compressed.artifact>true</rapids.compressed.artifact>
        <rapids.default.jar.excludePattern/>
        <rapids.default.jar.phase>package</rapids.default.jar.phase>
        <!--
             If the shade package changes we need to also update jenkins/spark-premerge-build.sh
             so code coverage does not include the shaded classes.
        -->
        <rapids.shade.package>${spark.version.classifier}.com.nvidia.shaded.spark</rapids.shade.package>
        <rapids.shim.jar.phase>none</rapids.shim.jar.phase>
        <rapids.shim.jar.test.phase>package</rapids.shim.jar.test.phase>

        <!--
            Dummy value just to pass the Maven artifactId format check.
            The Enforcer Plugin checks for the proper value override in each releaseXYZ profile.
         -->
        <rapids.delta.artifactId1>DEFINE_FOR_EVERY_SPARK_SHIM</rapids.delta.artifactId1>

        <rapids.delta.artifactId2>${rapids.delta.artifactId1}</rapids.delta.artifactId2>
        <rapids.delta.artifactId3>${rapids.delta.artifactId1}</rapids.delta.artifactId3>
        <test.include.tags/>
        <rapids.shuffle.manager.override>true</rapids.shuffle.manager.override>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.sourceEncoding>UTF-8</project.reporting.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <pytest.TEST_TAGS>not qarun</pytest.TEST_TAGS>
        <pytest.TEST_PARALLEL/>
        <pytest.TEST_TYPE>developer</pytest.TEST_TYPE>
        <rat.consoleOutput>false</rat.consoleOutput>
        <!--
         If you update a dependency version so it is no longer a SNAPSHOT,
         please update the snapshot-shims profile as well so it is accurate -->
        <spark320.version>3.2.0</spark320.version>
        <spark321.version>3.2.1</spark321.version>
        <spark321cdh.version>3.2.1.3.2.7171000.0-3</spark321cdh.version>
        <spark322.version>3.2.2</spark322.version>
        <spark323.version>3.2.3</spark323.version>
        <spark324.version>3.2.4</spark324.version>
        <spark330.version>3.3.0</spark330.version>
        <spark331.version>3.3.1</spark331.version>
        <spark332.version>3.3.2</spark332.version>
        <spark333.version>3.3.3</spark333.version>
        <spark334.version>3.3.4</spark334.version>
        <spark340.version>3.4.0</spark340.version>
        <spark341.version>3.4.1</spark341.version>
        <spark342.version>3.4.2</spark342.version>
        <spark343.version>3.4.3</spark343.version>
        <spark344.version>3.4.4</spark344.version>
        <spark330cdh.version>3.3.0.3.3.7180.0-274</spark330cdh.version>
        <spark332cdh.version>3.3.2.3.3.7190.0-91</spark332cdh.version>
        <spark330db.version>3.3.0-databricks</spark330db.version>
        <spark332db.version>3.3.2-databricks</spark332db.version>
        <spark341db.version>3.4.1-databricks</spark341db.version>
        <spark350db143.version>3.5.0-databricks-143</spark350db143.version>
        <spark350.version>3.5.0</spark350.version>
        <spark351.version>3.5.1</spark351.version>
        <spark352.version>3.5.2</spark352.version>
        <spark353.version>3.5.3</spark353.version>
        <spark354.version>3.5.4</spark354.version>
        <!-- spark400.version>4.0.0-SNAPSHOT</spark400.version -->
        <mockito.version>3.12.4</mockito.version>
        <!-- same as Apache Spark 4.0.0 for Scala 2.13 except for cloudera shims -->
        <scala.plugin.version>4.9.1</scala.plugin.version>
        <maven.install.plugin.version>3.1.1</maven.install.plugin.version>
        <maven.jar.plugin.version>3.3.0</maven.jar.plugin.version>
        <scalatest-maven-plugin.version>2.0.2</scalatest-maven-plugin.version>
        <guava.cdh.version>30.0-jre</guava.cdh.version>
        <arrow.cdh.version>2.0.0</arrow.cdh.version>
        <slf4j.version>1.7.30</slf4j.version>
        <flatbuffers.java.version>1.11.0</flatbuffers.java.version>
        <hadoop.client.version>3.3.1</hadoop.client.version>
        <iceberg.version>0.13.2</iceberg.version>
        <scala.local-lib.path>org/scala-lang/scala-library/${scala.version}/scala-library-${scala.version}.jar</scala.local-lib.path>
        <target.classifier>${spark.version.classifier}</target.classifier>
        <maven.clean.plugin.version>3.1.0</maven.clean.plugin.version>
        <maven.scaladoc.skip>false</maven.scaladoc.skip>
        <maven.scalastyle.skip>false</maven.scalastyle.skip>
        <dist.jar.compress>true</dist.jar.compress>
        <spark330.iceberg.version>0.14.1</spark330.iceberg.version>
        <!--
            If true, disables verification that all shims are built from one and the same git
            commit hash. Do not use for CI!

            It is intended only for local builds of the dist module when combining locally-built shims
            with the ones deployed to a remote Maven repo.
        -->
        <ignore.shim.revisions.check>false</ignore.shim.revisions.check>

        <spark.shim.dest>${project.basedir}/target/${spark.version.classifier}/generated/src</spark.shim.dest>
        <!--
          Build and run unit tests on one specific version for each sub-version (e.g. 320, 330)
          Base shim version (320 currently) should be covered in default mvn verify command of premerge script,
          so base shim version is removed from the premergeUT list.
          Separate the versions to two parts: premergeUT1(2 shims' UT + 1/3 of the integration tests)
          and premergeUT2(1 shim's UT + 2/3 of the integration tests), for balancing the duration
        -->
        <premergeUT1.buildvers>
            320,
            330
        </premergeUT1.buildvers>
        <premergeUT2.buildvers>
            340
        </premergeUT2.buildvers>
        <premergeUTF8.buildvers>
            320
        </premergeUTF8.buildvers>
        <premergeScala213.buildvers>
            333,
            340
        </premergeScala213.buildvers>
        <jdk11.buildvers>
            321,
            331,
            340
        </jdk11.buildvers>
        <jdk17.buildvers>
        </jdk17.buildvers>
        <jdk17.scala213.buildvers>
            330
            <!-- 400 -->
        </jdk17.scala213.buildvers>
        <shimplify.shims/>
        <cpd.sourceType>main</cpd.sourceType>
        <!-- SPARK-36796: extra JVM args required for JDK 17 tests -->
        <extraJavaTestArgs>
            -XX:+IgnoreUnrecognizedVMOptions
            --add-opens=java.base/java.lang=ALL-UNNAMED
            --add-opens=java.base/java.lang.invoke=ALL-UNNAMED
            --add-opens=java.base/java.lang.reflect=ALL-UNNAMED
            --add-opens=java.base/java.io=ALL-UNNAMED
            --add-opens=java.base/java.net=ALL-UNNAMED
            --add-opens=java.base/java.nio=ALL-UNNAMED
            --add-opens=java.base/java.util=ALL-UNNAMED
            --add-opens=java.base/java.util.concurrent=ALL-UNNAMED
            --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED
            --add-opens=java.base/sun.nio.ch=ALL-UNNAMED
            --add-opens=java.base/sun.nio.cs=ALL-UNNAMED
            --add-opens=java.base/sun.security.action=ALL-UNNAMED
            --add-opens=java.base/sun.util.calendar=ALL-UNNAMED
            -Djdk.reflect.useDirectMethodHandle=false
        </extraJavaTestArgs>
        <cloudera.repo.enabled>false</cloudera.repo.enabled>
        <cloudera.repo.url>https://repository.cloudera.com/artifactory/cloudera-repos</cloudera.repo.url>
        <cloudera.repo.url.fallback>https://repository.cloudera.com/repository/cloudera-repos</cloudera.repo.url.fallback>
        <bloop.installPhase>install</bloop.installPhase>
        <bloop.configDirectory>${spark.rapids.source.basedir}/.bloop</bloop.configDirectory>
        <build.info.path>${project.build.outputDirectory}/rapids4spark-version-info.properties</build.info.path>
        <nonfail.errors.quiet>false</nonfail.errors.quiet>
        <rapids.source.jar.phase>package</rapids.source.jar.phase>
    </properties>

    <dependencyManagement>
        <dependencies>
            <dependency>
              <groupId>com.nvidia</groupId>
              <artifactId>spark-rapids-jni</artifactId>
              <version>${spark-rapids-jni.version}</version>
              <classifier>${jni.classifier}</classifier>
            </dependency>
            <dependency>
                <groupId>org.openucx</groupId>
                <artifactId>jucx</artifactId>
                <version>${ucx.version}</version>
            </dependency>
            <dependency>
              <groupId>org.slf4j</groupId>
              <artifactId>jul-to-slf4j</artifactId>
              <version>${slf4j.version}</version>
              <!-- runtime scope is appropriate, but causes SBT build problems -->
            </dependency>
            <dependency>
              <groupId>org.slf4j</groupId>
              <artifactId>jcl-over-slf4j</artifactId>
              <version>${slf4j.version}</version>
              <!-- runtime scope is appropriate, but causes SBT build problems -->
            </dependency>
            <dependency>
              <groupId>org.scala-lang</groupId>
              <artifactId>scala-library</artifactId>
              <version>${scala.version}</version>
              <scope>provided</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.iceberg</groupId>
                <artifactId>iceberg-api</artifactId>
                <version>${iceberg.version}</version>
                <scope>provided</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.iceberg</groupId>
                <artifactId>iceberg-bundled-guava</artifactId>
                <version>${iceberg.version}</version>
                <scope>provided</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.iceberg</groupId>
                <artifactId>iceberg-core</artifactId>
                <version>${iceberg.version}</version>
                <scope>provided</scope>
            </dependency>
            <dependency>
              <groupId>org.apache.spark</groupId>
              <artifactId>spark-annotation_${scala.binary.version}</artifactId>
              <version>${spark.version}</version>
              <scope>provided</scope>
            </dependency>
            <dependency>
              <groupId>org.apache.spark</groupId>
              <artifactId>spark-hive_${scala.binary.version}</artifactId>
              <version>${spark.version}</version>
              <scope>provided</scope>
            </dependency>
            <dependency>
              <groupId>org.apache.spark</groupId>
              <artifactId>spark-sql_${scala.binary.version}</artifactId>
              <version>${spark.version}</version>
              <scope>provided</scope>
            </dependency>
            <dependency>
              <groupId>org.apache.spark</groupId>
              <artifactId>spark-avro_${scala.binary.version}</artifactId>
              <version>${spark.version}</version>
              <scope>provided</scope>
            </dependency>
            <dependency>
                <groupId>com.google.flatbuffers</groupId>
                <artifactId>flatbuffers-java</artifactId>
                <version>${flatbuffers.java.version}</version>
                <scope>compile</scope>  <!-- for shade -->
            </dependency>
            <dependency>
              <groupId>org.rogach</groupId>
              <artifactId>scallop_${scala.binary.version}</artifactId>
              <version>3.5.1</version>
            </dependency>
            <dependency>
                <!-- For shading: Spark 3.2 and earlier use an older version (0.9.0), which doesn't
                 contain the required API we use.
                 -->
                <groupId>org.roaringbitmap</groupId>
                <artifactId>RoaringBitmap</artifactId>
                <version>${roaringbitmap.version}</version>
                <scope>compile</scope>
            </dependency>
            <dependency>
              <groupId>org.scalatest</groupId>
              <artifactId>scalatest_${scala.binary.version}</artifactId>
              <version>3.2.16</version>
              <scope>test</scope>
            </dependency>
            <dependency>
                <groupId>org.scalatestplus</groupId>
                <artifactId>mockito-4-11_${scala.binary.version}</artifactId>
                <version>3.2.16.0</version>
                <scope>test</scope>
            </dependency>
            <dependency>
              <groupId>org.junit.jupiter</groupId>
              <artifactId>junit-jupiter-api</artifactId>
              <version>5.4.2</version>
              <scope>test</scope>
            </dependency>
            <dependency>
              <groupId>org.mockito</groupId>
              <artifactId>mockito-core</artifactId>
              <version>${mockito.version}</version>
              <scope>test</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.spark</groupId>
                <artifactId>spark-core_${scala.binary.version}</artifactId>
                <version>${spark.version}</version>
                <type>test-jar</type>
                <scope>test</scope>
                <exclusions>
                    <exclusion>
                        <groupId>org.apache.hadoop</groupId>
                        <artifactId>hadoop-client</artifactId>
                    </exclusion>
                    <exclusion>
                        <groupId>org.apache.hadoop</groupId>
                        <artifactId>hadoop-client-api</artifactId>
                    </exclusion>
                    <exclusion>
                        <groupId>org.apache.hadoop</groupId>
                        <artifactId>hadoop-client-runtime</artifactId>
                    </exclusion>
                    <exclusion>
                        <groupId>org.apache.curator</groupId>
                        <artifactId>curator-recipes</artifactId>
                    </exclusion>
                    <exclusion>
                        <groupId>org.slf4j</groupId>
                        <artifactId>slf4j-log4j12</artifactId>
                    </exclusion>
                    <exclusion>
                        <groupId>log4j</groupId>
                        <artifactId>log4j</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
            <dependency>
                <groupId>org.apache.spark</groupId>
                <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
                <version>${spark.version}</version>
                <type>test-jar</type>
                <scope>test</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.spark</groupId>
                <artifactId>spark-sql_${scala.binary.version}</artifactId>
                <version>${spark.version}</version>
                <type>test-jar</type>
                <scope>test</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>

    <build>
        <directory>${project.basedir}/target/${target.classifier}</directory>
        <pluginManagement>
            <plugins>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-source-plugin</artifactId>
                    <version>3.3.1</version>
                </plugin>
                <plugin>
                    <groupId>org.codehaus.mojo</groupId>
                    <artifactId>build-helper-maven-plugin</artifactId>
                    <version>3.3.0</version>
                </plugin>
                <plugin>
                  <groupId>org.apache.maven.plugins</groupId>
                  <artifactId>maven-antrun-plugin</artifactId>
                  <version>3.1.0</version>
                  <executions>
                    <execution>
                        <id>shimplify-shim-sources</id>
                        <goals><goal>run</goal></goals>
                        <phase>generate-sources</phase>
                        <configuration>
                            <target xmlns:ac="antlib:net.sf.antcontrib">
                                <property name="dyn.shim.buildver" value="all.buildvers"/>
                                <script language="jython" src="${spark.rapids.source.basedir}/build/dyn_shim_detection.py"/>
                                <scriptdef name="shimplify" language="jython" src="${spark.rapids.source.basedir}/build/shimplify.py">
                                    <attribute name="if"/>
                                </scriptdef>
                                <shimplify if="shimplify"/>
                            </target>
                        </configuration>
                    </execution>
                    <execution>
                        <id>setup-dirs</id>
                        <phase>initialize</phase>
                        <goals><goal>run</goal></goals>
                        <configuration>
                            <target>
                                <mkdir dir="${project.build.directory}/extra-resources"/>
                                <mkdir dir="${project.build.directory}/tmp"/>
                            </target>
                        </configuration>
                    </execution>
                    <execution>
                        <id>generate-build-info</id>
                        <phase>generate-resources</phase>
                        <configuration>
                            <!-- Execute the shell script to generate the plugin build information. -->
                            <target name="build-info">
                                <taskdef resource="net/sf/antcontrib/antcontrib.properties"/>
                                <exec executable="git"
                                      outputproperty="git.head.revision"
                                      discardError="${nonfail.errors.quiet}">
                                    <arg value="rev-parse"/>
                                    <arg value="HEAD"/>
                                </exec>
                                <ac:if xmlns:ac="antlib:net.sf.antcontrib">
                                    <available file="${build.info.path}" type="file"/>
                                    <then>
                                        <property file="${build.info.path}" prefix="saved.build-info"/>
                                    </then>
                                    <else>
                                        <property name="saved.build-info.revision" value="N/A"/>
                                    </else>
                                </ac:if>
                                <echo>
Comparing git revisions:
    previous=${saved.build-info.revision}
     current=${git.head.revision}
                                </echo>
                                <taskdef resource="net/sf/antcontrib/antcontrib.properties"/>
                                <ac:if xmlns:ac="antlib:net.sf.antcontrib">
                                    <equals arg1="${git.head.revision}" arg2="${saved.build-info.revision}"/>
                                    <then>
                                        <echo>
Git revisions unchanged: skipping version info file generation.
Delete ${build.info.path} or mvn clean if regeneration desired.
This will force full Scala code rebuild in downstream modules.
                                        </echo>
                                    </then>
                                    <else>
                                        <echo>Generating new version info file</echo>
                                        <mkdir dir="${project.build.outputDirectory}"/>
                                        <exec executable="bash"
                                            output="${build.info.path}"
                                            resultproperty="build-info.exitCode"
                                            errorproperty="build-info.errorMsg"
                                            failonerror="false">
                                            <arg value="${spark.rapids.source.basedir}/build/build-info"/>
                                            <arg value="${project.version}"/>
                                            <arg value="${spark-rapids-jni.version}"/>
                                        </exec>
                                        <fail message="exec build-info.sh failed, exit code is ${build-info.exitCode}, error msg is ${build-info.errorMsg}">
                                            <condition>
                                                <not>
                                                    <equals arg1="${build-info.exitCode}" arg2="0"/>
                                                </not>
                                            </condition>
                                        </fail>
                                    </else>
                                </ac:if>
                            </target>
                        </configuration>

                        <goals>
                            <goal>run</goal>
                        </goals>
                    </execution>
                    <execution>
                        <id>duplicate-code-detector</id>
                        <goals>
                            <goal>run</goal>
                        </goals>
                        <phase>none</phase>
                        <configuration>
                            <target>
                                <java classname="net.sourceforge.pmd.cpd.CPD">
                                    <arg line="--dir ${project.basedir}/src/${cpd.sourceType}"/>
                                    <arg line="${cpd.argLine}"/>
                                </java>
                            </target>
                        </configuration>
                    </execution>
                </executions>
                <dependencies>
                    <dependency>
                        <groupId>org.apache.ant</groupId>
                        <artifactId>ant</artifactId>
                        <version>1.10.12</version>
                    </dependency>
                    <dependency>
                        <groupId>ant-contrib</groupId>
                        <artifactId>ant-contrib</artifactId>
                        <version>1.0b3</version>
                    </dependency>
                    <dependency>
                        <groupId>org.python</groupId>
                        <artifactId>jython-standalone</artifactId>
                        <version>2.7.3</version>
                    </dependency>
                    <dependency>
                        <groupId>net.sourceforge.pmd</groupId>
                        <artifactId>pmd-dist</artifactId>
                        <version>6.55.0</version>
                        <exclusions>
                            <exclusion>
                                <groupId>org.apache.commons</groupId>
                                <artifactId>commons-lang3</artifactId>
                            </exclusion>
                        </exclusions>
                    </dependency>
                </dependencies>
                </plugin>
                <plugin>
                  <groupId>org.apache.maven.plugins</groupId>
                  <artifactId>maven-shade-plugin</artifactId>
                  <version>3.6.0</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-surefire-plugin</artifactId>
                    <version>2.12.4</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-compiler-plugin</artifactId>
                    <version>3.11.0</version>
                    <executions>
                        <execution>
                            <id>default-compile</id>
                            <phase>none</phase>
                        </execution>
                        <execution>
                            <id>default-testCompile</id>
                            <phase>none</phase>
                        </execution>
                    </executions>
                </plugin>
                <plugin>
                    <groupId>net.alchim31.maven</groupId>
                    <artifactId>scala-maven-plugin</artifactId>
                    <version>${scala.plugin.version}</version>
                    <executions>
                        <execution>
                            <id>eclipse-add-source</id>
                            <goals>
                                <goal>add-source</goal>
                            </goals>
                        </execution>
                        <execution>
                            <id>scala-compile-first</id>
                            <phase>process-resources</phase>
                            <goals>
                                <goal>compile</goal>
                            </goals>
                        </execution>
                        <execution>
                            <id>scala-test-compile-first</id>
                            <phase>process-test-resources</phase>
                            <goals>
                                <goal>testCompile</goal>
                            </goals>
                        </execution>
                        <execution>
                            <id>attach-scaladocs</id>
                            <phase>verify</phase>
                            <goals>
                                <goal>doc-jar</goal>
                            </goals>
                            <configuration>
                                <args>
                                    <arg>-doc-external-doc:${java.home}/lib/rt.jar#https://docs.oracle.com/javase/${java.major.version}/docs/api/index.html</arg>
                                    <arg>-doc-external-doc:${settings.localRepository}/${scala.local-lib.path}#https://scala-lang.org/api/${scala.version}/</arg>
                                    <arg>-doc-external-doc:${settings.localRepository}/org/apache/spark/spark-sql_${scala.binary.version}/${spark.version}/spark-sql_${scala.binary.version}-${spark.version}.jar#https://spark.apache.org/docs/${spark.version}/api/scala/index.html</arg>
                                </args>
                                <skip>${maven.scaladoc.skip}</skip>
                            </configuration>
                        </execution>
                    </executions>
                    <configuration>
                        <scalaVersion>${scala.version}</scalaVersion>
                        <checkMultipleScalaVersions>true</checkMultipleScalaVersions>
                        <failOnMultipleScalaVersions>true</failOnMultipleScalaVersions>
                        <recompileMode>${scala.recompileMode}</recompileMode>
                        <args>
                            <arg>-unchecked</arg>
                            <arg>-deprecation</arg>
                            <arg>-feature</arg>
                            <arg>-explaintypes</arg>
                            <arg>-Xlint:missing-interpolator</arg>
                            <!-- #if scala-2.12 -->
                            <arg>-Ywarn-unused:imports,locals,patvars,privates</arg>
                            <arg>-Yno-adapted-args</arg>
                            <arg>-Xfatal-warnings</arg>
                            <!-- #endif scala-2.12 -->
                            <arg>-Wconf:cat=lint-adapted-args:e</arg>
                            <!-- #if scala-2.13 --><!--
                            <arg>-Xsource:2.13</arg>
                            <arg>-Ywarn-unused:locals,patvars,privates</arg>
                            <arg>-Wconf:cat=deprecation:wv,any:e</arg>
                            <arg>-Wconf:cat=scaladoc:wv</arg>
                            <arg>-Wconf:cat=lint-multiarg-infix:wv</arg>
                            <arg>-Wconf:cat=other-nullary-override:wv</arg>
                            <arg>-Wconf:msg=^(?=.*?method|value|type|object|trait|inheritance)(?=.*?deprecated)(?=.*?since 2.13).+$:s</arg>
                            <arg>-Wconf:msg=^(?=.*?Widening conversion from)(?=.*?is deprecated because it loses precision).+$:s</arg>
                            <arg>-Wconf:msg=Auto-application to \`\(\)\` is deprecated:s</arg>
                            <arg>-Wconf:msg=method with a single empty parameter list overrides method without any parameter list:s</arg>
                            <arg>-Wconf:msg=method without a parameter list overrides a method with a single empty one:s</arg>
                            <arg>-Wconf:cat=deprecation&amp;msg=procedure syntax is deprecated:e</arg>
                            <arg>-Wconf:cat=unchecked&amp;msg=outer reference:s</arg>
                            <arg>-Wconf:cat=unchecked&amp;msg=eliminated by erasure:s</arg>
                            <arg>-Wconf:msg=^(?=.*?a value of type)(?=.*?cannot also be).+$:s</arg>
                            --><!-- #endif scala-2.13 -->
                        </args>
                        <jvmArgs>
                            <jvmArg>-Xms1024m</jvmArg>
                            <jvmArg>-Xmx1024m</jvmArg>
                        </jvmArgs>
                        <addJavacArgs>${scala.javac.args}</addJavacArgs>
                        <secondaryCacheDir>${rapids.secondaryCacheDir}</secondaryCacheDir>
                        <!-- #if scala-2.13 --><!--
                        <compilerPlugins combine.self="override">
                        </compilerPlugins>
                        --><!-- #endif scala-2.13 -->
                    </configuration>
                </plugin>
                <plugin>
                    <groupId>org.scalatest</groupId>
                    <artifactId>scalatest-maven-plugin</artifactId>
                    <version>${scalatest-maven-plugin.version}</version>
                    <configuration>
                        <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
                        <junitxml>.</junitxml>
                        <filereports>scala-test-output.txt</filereports>
                        <argLine>${argLine} -ea -Xmx4g -Xss4m ${extraJavaTestArgs}</argLine>
                        <stderr/>
                        <systemProperties>
                            <rapids.shuffle.manager.override>${rapids.shuffle.manager.override}</rapids.shuffle.manager.override>
                            <ai.rapids.refcount.debug>true</ai.rapids.refcount.debug>
                            <java.awt.headless>true</java.awt.headless>
                            <java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
                            <spark.ui.enabled>false</spark.ui.enabled>
                            <spark.ui.showConsoleProgress>false</spark.ui.showConsoleProgress>
                            <spark.unsafe.exceptionOnMemoryLeak>true</spark.unsafe.exceptionOnMemoryLeak>
                        </systemProperties>
                        <tagsToExclude>${test.exclude.tags}</tagsToExclude>
                        <tagsToInclude>${test.include.tags}</tagsToInclude>
                    </configuration>
                    <executions>
                        <execution>
                            <id>test</id>
                            <goals>
                                <goal>test</goal>
                            </goals>
                        </execution>
                    </executions>
                </plugin>
                <plugin>
                    <groupId>org.apache.rat</groupId>
                    <artifactId>apache-rat-plugin</artifactId>
                    <version>0.13</version>
                    <configuration>
                        <consoleOutput>${rat.consoleOutput}</consoleOutput>
                    </configuration>
                    <executions>
                        <execution>
                            <phase>verify</phase>
                            <goals>
                                <goal>check</goal>
                            </goals>
                        </execution>
                    </executions>
                </plugin>
                <plugin>
                    <groupId>org.jacoco</groupId>
                    <artifactId>jacoco-maven-plugin</artifactId>
                    <version>0.8.8</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-jar-plugin</artifactId>
                    <version>${maven.jar.plugin.version}</version>
                    <!--
                        Packaging type jar requires an output artifact without a classifier, a.k.a.
                        the main artifact. Modules whose real "main" output artifact is a shim jar
                        should comply by outputting a main jar without the class bytecode. In the pom
                        of such a module set
                          rapids.default.jar.excludePattern to **/*
                          rapids.shim.jar.phase to package to attach the shim artifact

                        Modules feeding into dist need not be compressed.
                        Set rapids.compressed.artifact to false
                    -->
                    <executions>
                        <execution>
                            <id>default-jar</id>
                            <goals><goal>jar</goal></goals>
                            <phase>${rapids.default.jar.phase}</phase>
                            <configuration>
                                <excludes>
                                    <exclude>${rapids.default.jar.excludePattern}</exclude>
                                </excludes>
                                <archive>
                                    <compress>${rapids.compressed.artifact}</compress>
                                </archive>
                            </configuration>
                        </execution>
                        <execution>
                            <id>create-${spark.version.classifier}-jar</id>
                            <goals><goal>jar</goal></goals>
                            <phase>${rapids.shim.jar.phase}</phase>
                            <configuration>
                                <classifier>${spark.version.classifier}</classifier>
                                <archive>
                                    <compress>${rapids.compressed.artifact}</compress>
                                </archive>
                            </configuration>
                        </execution>
                        <execution>
                            <id>default-test-jar</id>
                            <phase>${rapids.shim.jar.test.phase}</phase>
                            <goals>
                                <goal>test-jar</goal>
                            </goals>
                            <configuration>
                                <classifier>${spark.version.classifier}tests</classifier>
                                <skipIfEmpty>true</skipIfEmpty>
                            </configuration>
                        </execution>
                    </executions>
                </plugin>
                <plugin>
                    <groupId>org.codehaus.mojo</groupId>
                    <artifactId>exec-maven-plugin</artifactId>
                    <version>3.0.0</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-install-plugin</artifactId>
                    <version>${maven.install.plugin.version}</version>
                </plugin>
            </plugins>
        </pluginManagement>

        <plugins>
            <plugin>
                <groupId>ch.epfl.scala</groupId>
                <artifactId>bloop-maven-plugin</artifactId>
                <version>2.0.0</version>
                <executions>
                    <execution>
                        <id>default-cli</id>
                        <configuration>
                            <skip>true</skip>
                            <!-- workaround: skip is not skipping -->
                            <bloopConfigDir>/dev/null/ERROR: Do not specify the bloop-maven-plugin on the command line. Instead invoke `mvn install -DbloopInstall ...`</bloopConfigDir>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-enforcer-plugin</artifactId>
                <version>3.5.0</version>
                <executions>
                  <execution>
                    <id>enforce-maven</id>
                    <goals>
                      <goal>enforce</goal>
                    </goals>
                    <configuration>
                      <rules>
                        <requireMavenVersion>
                          <message>Minimum Maven version 3.6.x required</message>
                          <version>[3.6,)</version>
                        </requireMavenVersion>
                        <requireProperty>
                            <regexMessage>At least one of rapids.delta.artifactId1, rapids.delta.artifactId2 ... is required in the POM profile "release${buildver}"</regexMessage>
                            <property>rapids.delta.artifactId1</property>
                            <regex>^rapids-4-spark-delta-.*</regex>
                        </requireProperty>
                        <!-- #if scala-2.12 -->
                        <requireJavaVersion>
                            <message>Only Java 8, 11, and 17 are supported!</message>
                            <version>[1.8,1.9),[11,12),[17,18)</version>
                        </requireJavaVersion>
                        <requireProperty>
                          <property>buildver</property>
                          <regex>^(?!400).*$</regex>
                          <regexMessage>Spark 4.0.0 is only supported for Scala 2.13</regexMessage>
                        </requireProperty>
                        <!-- #endif scala-2.12 -->
                        <!-- #if scala-2.13 --><!--
                        <requireJavaVersion>
                            <message>Build for Scala 2.13 is only available with Java 17+</message>
                            <version>[17,)</version>
                        </requireJavaVersion>
                        <requireProperty>
                            <regexMessage>Unexpected buildver value ${buildver} for a Scala 2.13 build, only Apache Spark versions 3.3.0 (330) and higher are supported, no vendor builds such as 330db</regexMessage>
                            <property>buildver</property>
                            <regex>(?:[3-9][3-9]|[4-9][0-9])[0-9]</regex>
                        </requireProperty>
                        --><!-- #endif scala-2.13 -->
                      </rules>
                    </configuration>
                  </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.rat</groupId>
                <artifactId>apache-rat-plugin</artifactId>
                <configuration>
                    <excludes>
                        <exclude>**/*.md</exclude>
                        <exclude>**/*.iml</exclude>
                        <exclude>NOTICE-binary</exclude>
                        <exclude>docs/dev/idea-code-style-settings.xml</exclude>
                        <exclude>pom.xml.asc</exclude>
                        <exclude>jenkins/databricks/*.patch</exclude>
                        <exclude>*.jar</exclude>
                        <exclude>docs/demo/**/*.ipynb</exclude>
                        <exclude>docs/demo/**/*.zpln</exclude>
                        <exclude>**/src/main/resources/META-INF/services/*</exclude>
                        <exclude>**/src/test/resources/**</exclude>
                        <exclude>rmm_log.txt</exclude>
                        <exclude>dependency-reduced-pom*.xml</exclude>
                        <exclude>**/.*/**</exclude>
                        <exclude>**/src/main/java/com/nvidia/spark/rapids/format/*</exclude>
                        <exclude>**/*.csv</exclude>
                        <exclude>dist/*.txt</exclude>
                        <exclude>**/META-INF/com.nvidia.spark.rapids.SparkShimServiceProvider</exclude>
                        <!-- Apache Rat automatically excludes the target folder of projects that are
                        included by default, but some projects are only conditionally included, so
                        exclude target folders explicitly. -->
                        <exclude>**/target/**/*</exclude>
                        <exclude>**/cufile.log</exclude>
                        <exclude>**/cudf_log.txt</exclude>
                        <exclude>thirdparty/parquet-testing/**</exclude>
                    </excludes>
                </configuration>
            </plugin>

            <!-- Use this plugin to set the "spark.rapids.project.basedir" property -->
            <plugin>
                <groupId>org.commonjava.maven.plugins</groupId>
                <artifactId>directory-maven-plugin</artifactId>
                <version>0.1</version>
                <executions>
                    <execution>
                        <id>directories</id>
                        <goals>
                            <goal>highest-basedir</goal>
                        </goals>
                        <phase>initialize</phase>
                        <configuration>
                            <property>spark.rapids.project.basedir</property>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-antrun-plugin</artifactId>
                <!-- parent-pom only executions -->
                <inherited>false</inherited>
                <executions>
                    <execution>
                        <!--
                        This is an alternative implementation of the scalastyle check invocation,
                        a replacement for scalastyle-maven-plugin. It is motivated by the following:
                        - All Scala files are checked at once regardless of the module, so the
                        developer can focus on addressing violations without being distracted by
                        build issues in-between
                        - We do not have to hardcode the source code roots that are added dynamically
                        to the project by other Maven plugins
                        - The scalastyle launch cost is amortized across all modules
                        -->
                        <id>scalastyle-all-modules</id>
                        <phase>verify</phase>
                        <goals><goal>run</goal></goals>
                        <configuration>
                            <skip>${maven.scalastyle.skip}</skip>
                            <target>
                                <pathconvert property="scalastyle.dirs" pathsep=" ">
                                    <dirset dir="${spark.rapids.source.basedir}">
                                        <include name="**/src/main"/>
                                        <include name="**/src/test"/>
                                        <exclude name="**/target/*/generated/src/**"/>
                                    </dirset>
                                </pathconvert>
                                <echo>Checking scalastyle for all modules using following paths:
                                    ${scalastyle.dirs}
                                </echo>
                                <java classname="org.scalastyle.Main" failonerror="true">
                                    <arg line="--verbose false"/>
                                    <arg line="--warnings false"/>
                                    <arg line="--config ${spark.rapids.source.basedir}/scalastyle-config.xml"/>
                                    <arg line="--xmlOutput ${project.basedir}/target/scalastyle-output.xml"/>
                                    <arg line="--inputEncoding ${project.build.sourceEncoding}"/>
                                    <arg line="--xmlEncoding ${project.reporting.outputEncoding}"/>
                                    <arg line="${scalastyle.dirs}"/>
                                </java>
                            </target>
                        </configuration>
                    </execution>
                    <execution>
                        <id>clean-all-modules</id>
                        <phase>clean</phase>
                        <goals><goal>run</goal></goals>
                        <configuration>
                            <skip>${maven.cleanall.skip}</skip>
                            <target>
                                <!-- #if scala-2.12 -->
                                <dirset dir="${project.basedir}"
                                    includes="**/target"
                                    excludes="scala2.13/**"
                                    id="target.dirs.for.clean"/>
                                <!-- #endif scala-2.12 -->
                                <!-- #if scala-2.13 --><!--
                                <dirset dir="${project.basedir}"
                                    includes="**/target"
                                    id="target.dirs.for.clean"/>
                                --><!-- #endif scala-2.13 -->
                                <pathconvert property="target.dirs.str" pathsep=" ">
                                   <dirset refid="target.dirs.for.clean"/>
                                </pathconvert>
                                <echo>Cleaning build directories of all modules ${target.dirs.str}</echo>
                                <!-- workaround: Ant's delete task does not work with a dirset -->
                                <exec dir="${project.basedir}"
                                      executable="rm"
                                      discardError="${nonfail.errors.quiet}">
                                    <arg value="-rf"/>
                                    <arg line="${target.dirs.str}"/>
                                </exec>
                            </target>
                        </configuration>
                    </execution>
                </executions>
                <dependencies>
                    <dependency>
                        <groupId>com.beautiful-scala</groupId>
                        <artifactId>scalastyle_${scala.binary.version}</artifactId>
                        <version>1.5.1</version>
                    </dependency>
                </dependencies>
            </plugin>
            <plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>build-helper-maven-plugin</artifactId>
                <executions>
                    <execution>
                        <id>update-highest-source-dir</id>
                        <goals>
                            <goal>regex-property</goal>
                        </goals>
                        <phase>initialize</phase>
                        <configuration>
                            <name>spark.rapids.source.basedir</name>
                            <value>${spark.rapids.project.basedir}</value>
                            <regex>\/scala[0-9.]+</regex>
                            <replacement></replacement>
                            <failIfNoMatch>false</failIfNoMatch>
                        </configuration>
                    </execution>
                    <execution>
                        <id>update-base-source-dir</id>
                        <goals>
                            <goal>regex-property</goal>
                        </goals>
                        <phase>initialize</phase>
                        <configuration>
                            <name>origbasedir</name>
                            <value>${project.basedir}</value>
                            <regex>\/scala[0-9.]+</regex>
                            <replacement></replacement>
                            <failIfNoMatch>false</failIfNoMatch>
                        </configuration>
                    </execution>
                    <execution>
                        <id>update-shimplify-base-source-dir</id>
                        <goals>
                            <goal>regex-property</goal>
                        </goals>
                        <phase>initialize</phase>
                        <configuration>
                            <name>shimplify.src.basedir</name>
                            <value>${project.basedir}</value>
                            <regex>\/scala[0-9.]+</regex>
                            <replacement></replacement>
                            <failIfNoMatch>false</failIfNoMatch>
                        </configuration>
                    </execution>
                    <execution>
                        <id>add-sources</id>
                        <phase>generate-sources</phase>
                        <goals>
                            <goal>add-source</goal>
                        </goals>
                        <configuration>
                            <sources>
                                <!-- #if scala-2.12 -->
                                <source>${project.basedir}/src/main/scala-${scala.binary.version}</source>
                                <!-- #endif scala-2.12 -->
                                <!-- #if scala-2.13 --><!--
                                <source>${project.basedir}/../../${rapids.module}/src/main/java</source>
                                <source>${project.basedir}/../../${rapids.module}/src/main/scala</source>
                                <source>${project.basedir}/../../${rapids.module}/src/main/scala-${scala.binary.version}</source>
                                --><!-- #endif scala-2.13 -->
                            </sources>
                        </configuration>
                    </execution>
                    <execution>
                        <id>add-test-sources</id>
                        <phase>generate-test-sources</phase>
                        <goals>
                            <goal>add-test-source</goal>
                        </goals>
                        <configuration>
                            <sources>
                                <!-- #if scala-2.12 -->
                                <source>${project.basedir}/src/test/scala-${scala.binary.version}</source>
                                <!-- #endif scala-2.12 -->
                                <!-- #if scala-2.13 --><!--
                                <source>${project.basedir}/../../${rapids.module}/src/test/java</source>
                                <source>${project.basedir}/../../${rapids.module}/src/test/scala</source>
                                <source>${project.basedir}/../../${rapids.module}/src/test/scala-${scala.binary.version}</source>
                                --><!-- #endif scala-2.13 -->
                            </sources>
                        </configuration>
                    </execution>
                    <execution>
                        <id>add-shimple-sources</id>
                        <phase>generate-sources</phase>
                        <goals><goal>add-source</goal></goals>
                        <configuration>
                            <sources>
                                <source>${spark.shim.dest}/main/scala</source>
                                <source>${spark.shim.dest}/main/java</source>
                            </sources>
                        </configuration>
                    </execution>
                    <execution>
                        <id>add-shimple-test-sources</id>
                        <phase>generate-test-sources</phase>
                        <goals><goal>add-test-source</goal></goals>
                        <configuration>
                            <sources>
                                <source>${spark.shim.dest}/test/scala</source>
                                <source>${spark.shim.dest}/test/java</source>
                            </sources>
                        </configuration>
                    </execution>
                    <execution>
                        <id>add-resources</id>
                        <phase>generate-resources</phase>
                        <goals>
                            <goal>add-resource</goal>
                        </goals>
                        <configuration>
                            <resources>
                                <resource>
                                    <!-- #if scala-2.12 -->
                                    <directory>${project.basedir}/src/main/resources</directory>
                                    <!-- #endif scala-2.12 -->
                                    <!-- #if scala-2.13 --><!--
                                    <directory>${project.basedir}/../../${rapids.module}/src/main/resources</directory>
                                    --><!-- #endif scala-2.13 -->
                                </resource>
                            </resources>
                        </configuration>
                    </execution>
                    <execution>
                        <id>add-test-resources</id>
                        <phase>generate-test-resources</phase>
                        <goals>
                            <goal>add-test-resource</goal>
                        </goals>
                        <configuration>
                            <resources>
                                <resource>
                                    <!-- #if scala-2.12 -->
                                    <directory>${project.basedir}/src/test/resources</directory>
                                    <!-- #endif scala-2.12 -->
                                    <!-- #if scala-2.13 --><!--
                                    <directory>${project.basedir}/../../${rapids.module}/src/test/resources</directory>
                                    --><!-- #endif scala-2.13 -->
                                </resource>
                            </resources>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.jacoco</groupId>
                <artifactId>jacoco-maven-plugin</artifactId>
                <executions>
                    <execution>
                        <id>prepare-agent</id>
                        <goals>
                            <goal>prepare-agent</goal>
                        </goals>
                        <configuration>
                            <append>true</append>
                            <excludes>
                                <exclude>${rapids.shade.package}.*</exclude>
                            </excludes>
                            <includes>
                                <include>ai.rapids.cudf.*</include>
                                <include>com.nvidia.spark.*</include>
                                <include>org.apache.spark.sql.rapids.*</include>
                            </includes>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
    <repositories>
        <repository>
            <id>snapshots-repo</id>
            <url>https://oss.sonatype.org/content/repositories/snapshots</url>
            <releases>
                <enabled>false</enabled>
            </releases>
            <snapshots>
                <enabled>true</enabled>
            </snapshots>
        </repository>
        <repository>
            <id>apache-snapshots-repo</id>
            <url>https://repository.apache.org/content/repositories/snapshots/</url>
            <releases>
                <enabled>false</enabled>
            </releases>
            <snapshots>
                <enabled>true</enabled>
            </snapshots>
        </repository>
        <repository>
            <id>central</id>
            <name>Maven Repository Switchboard</name>
            <layout>default</layout>
            <url>https://repo1.maven.org/maven2</url>
            <snapshots>
                <enabled>false</enabled>
            </snapshots>
        </repository>
        <repository>
            <id>cloudera-repo</id>
            <url>${cloudera.repo.url}</url>
            <releases>
                <enabled>${cloudera.repo.enabled}</enabled>
            </releases>
            <snapshots>
                <enabled>${cloudera.repo.enabled}</enabled>
            </snapshots>
        </repository>
        <repository>
            <id>cloudera-repo-fallback</id>
            <url>${cloudera.repo.url.fallback}</url>
            <releases>
                <enabled>${cloudera.repo.enabled}</enabled>
            </releases>
            <snapshots>
                <enabled>${cloudera.repo.enabled}</enabled>
            </snapshots>
        </repository>
    </repositories>
    <pluginRepositories>
        <pluginRepository>
            <id>central</id>
            <url>https://repo1.maven.org/maven2</url>
            <snapshots>
                <enabled>false</enabled>
            </snapshots>
        </pluginRepository>
        <pluginRepository>
            <id>cloudera-repo</id>
            <url>${cloudera.repo.url}</url>
            <releases>
                <enabled>${cloudera.repo.enabled}</enabled>
            </releases>
            <snapshots>
                <enabled>${cloudera.repo.enabled}</enabled>
            </snapshots>
        </pluginRepository>
        <pluginRepository>
            <id>cloudera-repo-fallback</id>
            <url>${cloudera.repo.url.fallback}</url>
            <releases>
                <enabled>${cloudera.repo.enabled}</enabled>
            </releases>
            <snapshots>
                <enabled>${cloudera.repo.enabled}</enabled>
            </snapshots>
        </pluginRepository>
    </pluginRepositories>
</project>
