File: integration_hdfs.sh

#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

set -e
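
# Usage: integration_hdfs.sh <arrow source dir> <arrow build dir>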

# shellcheck disable=SC2034
source_dir=${1}/cpp  # currently unused below, hence the SC2034 suppression
build_dir=${2}/cpp

# Expose the Hadoop jars to libhdfs' embedded JVM and point the HDFS drivers
# at the local cluster configuration.  ARROW_LIBHDFS3_DIR assumes libhdfs3 is
# installed in the active conda environment.
HADOOP_CLASSPATH=$("$HADOOP_HOME/bin/hadoop" classpath --glob)
export CLASSPATH="${HADOOP_CLASSPATH}"
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export LIBHDFS3_CONF=$HADOOP_CONF_DIR/hdfs-site.xml
export ARROW_LIBHDFS3_DIR=$CONDA_PREFIX/lib

# Remember both possible libhdfs locations so the helpers below can switch
# between them.
libhdfs_dir=$HADOOP_HOME/lib/native
hadoop_home=$HADOOP_HOME

# Locate libhdfs via HADOOP_HOME (Hadoop's bundled lib/native directory).
function use_hadoop_home() {
  unset ARROW_LIBHDFS_DIR
  export HADOOP_HOME=$hadoop_home
}

# Locate libhdfs via an explicit ARROW_LIBHDFS_DIR instead of HADOOP_HOME.
function use_libhdfs_dir() {
  unset HADOOP_HOME
  export ARROW_LIBHDFS_DIR=$libhdfs_dir
}

# Execute the C++ HDFS tests.  ARROW_HDFS_TEST_LIBHDFS_REQUIRE=ON asks the
# tests to fail rather than be skipped if libhdfs cannot be loaded.
export ARROW_HDFS_TEST_LIBHDFS_REQUIRE=ON
pushd "${build_dir}"

debug/arrow-io-hdfs-test
debug/arrow-hdfs-test
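
# Re-run the same tests with libhdfs located via ARROW_LIBHDFS_DIR instead of
# HADOOP_HOME, then restore the default.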

use_libhdfs_dir
debug/arrow-io-hdfs-test
debug/arrow-hdfs-test
use_hadoop_home

popd

# We cannot pass custom arguments such as --hdfs or --only-hdfs together with
# --pyargs, because pytest ignores them (see
# https://github.com/pytest-dev/pytest/issues/3517), so the HDFS tests are
# enabled through environment variables instead.
export PYARROW_TEST_HDFS=ON
export PYARROW_HDFS_TEST_LIBHDFS_REQUIRE=ON
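
# Run the PyArrow filesystem tests, first locating libhdfs via HADOOP_HOME.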

pytest -vs --pyargs pyarrow.tests.test_fs
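
# Repeat with libhdfs located via ARROW_LIBHDFS_DIR.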

use_libhdfs_dir
pytest -vs --pyargs pyarrow.tests.test_fs

# Restore the HADOOP_HOME-based configuration.
use_hadoop_home