-
Notifications
You must be signed in to change notification settings - Fork 3.6k
/
Copy pathmeta.yaml
448 lines (423 loc) · 18.8 KB
/
meta.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
# NOTE: In contrast to the conda-forge recipe, ARROW_VERSION is a templated variable here.
{% set version = ARROW_VERSION %}
{% set cuda_enabled = cuda_compiler_version != "None" %}
{% set build_ext_version = ARROW_VERSION %}
{% set build_ext = "cuda" if cuda_enabled else "cpu" %}
{% set proc_build_number = "0" %}
{% set llvm_version = "15" %}
# see https://github.com/apache/arrow/blob/apache-arrow-10.0.1/cpp/CMakeLists.txt#L88-L90
{% set so_version = (version.split(".")[0] | int * 100 + version.split(".")[1] | int) ~ "." ~ version.split(".")[2] ~ ".0" %}
package:
name: apache-arrow
version: {{ version }}
source:
path: ../../../../
build:
number: 0
# for cuda support, building with one version is enough to be compatible with
# all later versions, since arrow is only using libcuda, and not libcudart.
skip: true # [cuda_compiler_version not in ("None", cuda_compiler_version_min)]
# arrow promises API- & ABI-compatibility along SemVer, see #1096
run_exports:
- {{ pin_subpackage("libarrow", max_pin="x") }}
outputs:
- name: apache-arrow-proc
version: {{ build_ext_version }}
build:
number: {{ proc_build_number }}
string: {{ build_ext }}
test:
commands:
- exit 0
about:
home: http://github.com/apache/arrow
license: Apache-2.0
license_file:
- LICENSE.txt
summary: A meta-package to select Arrow build variant
# compat output for old mutex-package naming
- name: arrow-cpp-proc
version: {{ build_ext_version }}
build:
number: {{ proc_build_number }}
string: {{ build_ext }}
requirements:
run:
- apache-arrow-proc ={{ build_ext_version }}={{ build_ext }}
test:
commands:
- exit 0
- name: libarrow
script: build-arrow.sh # [unix]
script: build-arrow.bat # [win]
version: {{ version }}
build:
string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }}
run_exports:
- {{ pin_subpackage("libarrow", max_pin="x.x.x") }}
ignore_run_exports_from:
- {{ compiler("cuda") }} # [cuda_compiler_version != "None"]
# arrow only uses headers, apparently
- gflags
# shared lib linked on unix, not on win
- glog # [win]
ignore_run_exports:
# we don't need all of brotli's run-exports
- libbrotlicommon
track_features: {{ "[arrow-cuda]" if cuda_enabled else "" }}
missing_dso_whitelist:
- '*/libcuda.so.*' # [linux]
- '*/nvcuda.dll' # [win]
requirements:
build:
- {{ compiler("c") }}
- {{ compiler("cxx") }}
- {{ compiler("cuda") }} # [cuda_compiler_version != "None"]
# needs to run protoc & grpc_cpp_plugin
- libgrpc # [build_platform != target_platform]
- libprotobuf # [build_platform != target_platform]
# needed for gandiva
- clangdev {{ llvm_version }} # [build_platform != target_platform]
- llvmdev {{ llvm_version }} # [build_platform != target_platform]
- gnuconfig # [build_platform != target_platform]
- cmake
- ninja
# necessary for vendored jemalloc
- autoconf # [linux]
- make # [linux]
host:
# for required dependencies, see
# https://github.com/apache/arrow/blob/apache-arrow-11.0.0/cpp/cmake_modules/ThirdpartyToolchain.cmake#L46-L75
- clangdev {{ llvm_version }}
- llvmdev {{ llvm_version }}
- aws-crt-cpp
- aws-sdk-cpp
- boost-cpp >=1.70
- brotli
- bzip2
# not yet: https://github.com/conda-forge/cpp-opentelemetry-sdk-feedstock/issues/38
# - cpp-opentelemetry-sdk
# - proto-opentelemetry-proto =={{ cpp_opentelemetry_sdk }}
- gflags
- glog
- google-cloud-cpp
# arrow uses a customized jemalloc, see #944
# - jemalloc
- libabseil
- libgrpc
- libprotobuf
- libutf8proc
- lz4-c
- nlohmann_json
# gandiva depends on openssl
- openssl
- orc
- rapidjson
- re2
- snappy
- thrift-cpp
- ucx # [linux]
- xsimd
- zlib
- zstd
- __cuda >={{ cuda_compiler_version_min }} # [cuda_compiler_version != "None"]
# since libgoogle-cloud is static on windows, see
# https://github.com/conda-forge/google-cloud-cpp-feedstock/pull/108,
# its host deps (which aren't yet covered above) leak into the build here
- libcrc32c # [win]
- libcurl # [win]
# same for libgrpc (before 1.55.0, which is coupled with libprotobuf 4.23.x)
- c-ares # [win and libprotobuf == "3.21"]
run_constrained:
- apache-arrow-proc =*={{ build_ext }}
# make sure we don't co-install with old version of old package name
- arrow-cpp ={{ version }}
# old parquet lib output, now part of this feedstock
- parquet-cpp <0.0a0
test:
commands:
{% set headers = [
"arrow/api.h", "arrow/acero/api.h", "arrow/flight/types.h",
"arrow/flight/sql/api.h", "gandiva/engine.h", "parquet/api/reader.h"
] %}
{% for each_header in headers %}
# headers
- test -f $PREFIX/include/{{ each_header }} || (echo "{{ each_header }} not found" && exit 1) # [unix]
- if not exist %LIBRARY_INC%\{{ "\\".join(each_header.split("/")) }} exit 1 # [win]
{% endfor %}
{% set libs = (cuda_compiler_version != "None") * ["arrow_cuda"] + [
"arrow", "arrow_acero", "arrow_dataset", "arrow_flight",
"arrow_flight_sql", "arrow_substrait", "gandiva", "parquet"
] %}
{% for each_lib in libs %}
# shared
- test -f $PREFIX/lib/lib{{ each_lib }}.so # [linux]
- test -f $PREFIX/lib/lib{{ each_lib }}.dylib # [osx]
- if not exist %LIBRARY_BIN%\{{ each_lib }}.dll exit 1 # [win]
- if not exist %LIBRARY_LIB%\{{ each_lib }}.lib exit 1 # [win]
# absence of static libraries
- test ! -f $PREFIX/lib/lib{{ each_lib }}.a # [unix]
- if exist %LIBRARY_LIB%\{{ each_lib }}_static.lib exit 1 # [win]
{% endfor %}
# absence of arrow_cuda for CPU builds
- test ! -f $PREFIX/lib/libarrow_cuda.so # [(cuda_compiler_version == "None") and linux]
- test ! -f $PREFIX/lib/libarrow_cuda.a # [(cuda_compiler_version == "None") and linux]
- if exist %LIBRARY_BIN%\arrow_cuda.dll exit 1 # [(cuda_compiler_version == "None") and win]
- if exist %LIBRARY_LIB%\arrow_cuda.lib exit 1 # [(cuda_compiler_version == "None") and win]
- if exist %LIBRARY_LIB%\arrow_cuda_static.lib exit 1 # [(cuda_compiler_version == "None") and win]
# gdb-wrapper (paths are stacked intentionally)
- test -f $PREFIX/share/gdb/auto-load/$PREFIX/lib/libarrow.so.{{ so_version }}-gdb.py # [linux]
- test -f $PREFIX/share/gdb/auto-load/$PREFIX/lib/libarrow.{{ so_version }}.dylib-gdb.py # [osx]
about:
home: http://github.com/apache/arrow
license: Apache-2.0
license_file:
- LICENSE.txt
summary: C++ libraries for Apache Arrow
# compat output for old naming scheme; switched for 10.0.0; keep for a few versions
- name: arrow-cpp
version: {{ version }}
build:
string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }}
run_exports:
- {{ pin_subpackage("libarrow", max_pin="x.x.x") }}
requirements:
host:
- {{ pin_subpackage('libarrow', exact=True) }}
# avoid wrappers for different builds colliding due to identical hashes
- libprotobuf
run:
- {{ pin_subpackage('libarrow', exact=True) }}
test:
commands:
- exit 0
- name: pyarrow
script: build-pyarrow.sh # [unix]
script: build-pyarrow.bat # [win]
version: {{ version }}
build:
string: py{{ CONDA_PY }}h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }}
ignore_run_exports_from:
- {{ compiler("cuda") }} # [cuda_compiler_version != "None"]
track_features: {{ "[arrow-cuda]" if cuda_enabled else "" }}
rpaths:
- lib/
- {{ SP_DIR }}/pyarrow
missing_dso_whitelist:
# not actually missing, but installed into SP_DIR, see tests
- '*/arrow_python.dll' # [win]
- '*/arrow_python_flight.dll' # [win]
requirements:
build:
- {{ compiler("c") }}
- {{ compiler("cxx") }}
# pyarrow does not require nvcc but it needs to link against libraries in libarrow=*=*cuda
- {{ compiler("cuda") }} # [cuda_compiler_version != "None"]
- python # [build_platform != target_platform]
- cross-python_{{ target_platform }} # [build_platform != target_platform]
- cython <3 # [build_platform != target_platform]
- numpy # [build_platform != target_platform]
- cmake
- ninja
host:
# we're building for two protobuf versions, cannot pin exactly
# - {{ pin_subpackage('libarrow', exact=True) }}
- libarrow ={{ version }}=*_{{ PKG_BUILDNUM }}_{{ build_ext }}
- clangdev {{ llvm_version }}
- llvmdev {{ llvm_version }}
- cython <3
- numpy
- python
- setuptools
- setuptools_scm
run:
# - {{ pin_subpackage('libarrow', exact=True) }}
- libarrow ={{ version }}=*_{{ PKG_BUILDNUM }}_{{ build_ext }}
- {{ pin_compatible('numpy') }}
- python
run_constrained:
- apache-arrow-proc =*={{ build_ext }}
test:
files:
- test_read_parquet.py
imports:
- pyarrow
- pyarrow.dataset
- pyarrow.flight
- pyarrow.gandiva
- pyarrow.orc # [unix]
- pyarrow.parquet
- pyarrow.fs
- pyarrow._s3fs
- pyarrow._hdfs
# We can only test importing cuda package but cannot run when a
# CUDA device is not available, for instance, when building from CI.
# On Windows, we cannot even do that due to `nvcuda.dll` not being found, see
# https://conda-forge.org/docs/maintainer/knowledge_base.html#nvcuda-dll-cannot-be-found-on-windows
# However, we check below for (at least) the presence of a correctly-compiled module
- pyarrow.cuda # [cuda_compiler_version != "None" and not win]
commands:
# libraries that depend on python (and hence aren't in libarrow itself)
- test -f ${SP_DIR}/pyarrow/libarrow_python.so # [linux]
- test -f ${SP_DIR}/pyarrow/libarrow_python_flight.so # [linux]
- test -f ${SP_DIR}/pyarrow/libarrow_python.dylib # [osx]
- test -f ${SP_DIR}/pyarrow/libarrow_python_flight.dylib # [osx]
- if not exist %SP_DIR%\pyarrow\arrow_python.dll exit 1 # [win]
- if not exist %SP_DIR%\pyarrow\arrow_python_flight.dll exit 1 # [win]
- test -f ${SP_DIR}/pyarrow/include/arrow/python/pyarrow.h # [unix]
- if not exist %SP_DIR%\pyarrow\include\arrow\python\pyarrow.h exit 1 # [win]
- test ! -f ${SP_DIR}/pyarrow/tests/test_array.py # [unix]
- if exist %SP_DIR%/pyarrow/tests/test_array.py exit 1 # [win]
# Need to remove dot from PY_VER; %MYVAR:x=y% replaces "x" in %MYVAR% with "y"
- if not exist %SP_DIR%/pyarrow/_cuda.cp%PY_VER:.=%-win_amd64.pyd exit 1 # [win and cuda_compiler_version != "None"]
- python test_read_parquet.py
about:
home: http://github.com/apache/arrow
license: Apache-2.0
license_file:
- LICENSE.txt
summary: Python libraries for Apache Arrow
- name: pyarrow-tests
script: build-pyarrow.sh # [unix]
script: build-pyarrow.bat # [win]
version: {{ version }}
build:
string: py{{ CONDA_PY }}h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }}
ignore_run_exports_from:
- {{ compiler("cuda") }} # [cuda_compiler_version != "None"]
track_features: {{ "[arrow-cuda]" if cuda_enabled else "" }}
requirements:
build:
- {{ compiler("c") }}
- {{ compiler("cxx") }}
# pyarrow does not require nvcc but it needs to link against libraries in libarrow=*=*cuda
- {{ compiler("cuda") }} # [cuda_compiler_version != "None"]
- python # [build_platform != target_platform]
- cross-python_{{ target_platform }} # [build_platform != target_platform]
- cython <3 # [build_platform != target_platform]
- numpy # [build_platform != target_platform]
- cmake
- ninja
host:
- {{ pin_subpackage('libarrow', exact=True) }}
- {{ pin_subpackage('pyarrow', exact=True) }}
- clangdev {{ llvm_version }}
- llvmdev {{ llvm_version }}
- cython <3
- numpy
- python
- setuptools
- setuptools_scm
run:
- {{ pin_subpackage('pyarrow', exact=True) }}
- python
run_constrained:
- apache-arrow-proc =*={{ build_ext }}
# crossbow CI: reduce to one python version, except on (unemulated) linux, where it's fast enough
{% if linux64 or py == 311 %}
# {% if not (aarch64 or ppc64le) or py in (310, 311) %}
# only run the full test suite for one python version when in emulation (each run takes ~45min);
# there's essentially zero divergence in behaviour across python versions anyway, and otherwise
# CUDA builds for aarch/ppc consistently run out of disk space on azure for some reason
test:
requires:
# vary protobuf version in test suite (historically, test failures only have a very
# weak dependency on python version, so we don't lose coverage by doing half & half)
- libprotobuf <4 # [py % 2 == 0]
# test_cpp_extension_in_python requires a compiler
- {{ compiler("cxx") }} # [linux]
# temporary pin due to missing fixture
- pytest <7.4.0
- pytest-lazy-fixture
- backports.zoneinfo # [py<39]
- cffi
- cloudpickle
- cython <3
- fastparquet
- fsspec
- hypothesis
- pandas
- scipy
# these are generally (far) behind on migrating abseil/grpc/protobuf,
# and using them as test dependencies blocks the migrator unnecessarily
# - pytorch
# - tensorflow
# we're not building java bindings
# - jpype1
# doesn't get picked up correctly
# - libhdfs3
# causes segfaults
# - sparse
source_files:
- testing/data
commands:
- cd ${SP_DIR}/pyarrow/tests # [unix]
- cd %SP_DIR%\pyarrow\tests # [win]
- export ARROW_TEST_DATA="${SRC_DIR}/testing/data" # [unix]
- set "ARROW_TEST_DATA=%SRC_DIR%\testing\data" # [win]
{% set tests_to_skip = "_not_a_real_test" %}
# we do not have GPUs in CI --> cannot test cuda
{% set tests_to_skip = tests_to_skip + " or test_cuda" %}
# skip tests that raise SIGINT and crash the test suite
{% set tests_to_skip = tests_to_skip + " or (test_csv and test_cancellation)" %} # [linux]
{% set tests_to_skip = tests_to_skip + " or (test_flight and test_interrupt)" %} # [linux]
# tests that may crash the agent due to out-of-bound memory writes or other risky stuff
{% set tests_to_skip = tests_to_skip + " or test_debug_memory_pool" %} # [aarch64 or ppc64le]
# cannot pass -D_LIBCPP_DISABLE_AVAILABILITY to test suite for our older macos sdk
{% set tests_to_skip = tests_to_skip + " or test_cpp_extension_in_python" %} # [osx]
# skip tests that make invalid(-for-conda) assumptions about the compilers setup
{% set tests_to_skip = tests_to_skip + " or test_cython_api" %} # [unix]
{% set tests_to_skip = tests_to_skip + " or test_visit_strings" %} # [unix]
# skip tests that cannot succeed in emulation
{% set tests_to_skip = tests_to_skip + " or test_debug_memory_pool_disabled" %} # [aarch64 or ppc64le]
{% set tests_to_skip = tests_to_skip + " or test_env_var_io_thread_count" %} # [aarch64 or ppc64le]
# vvvvvvv TESTS THAT SHOULDN'T HAVE TO BE SKIPPED vvvvvvv
{% set tests_to_skip = tests_to_skip + " or test_extension_to_pandas_storage_type" %}
# segfaults on OSX: to investigate ASAP
{% set tests_to_skip = tests_to_skip + " or test_flight" %} # [osx]
# gandiva tests are segfaulting on ppc
{% set tests_to_skip = tests_to_skip + " or test_gandiva" %} # [ppc64le]
# test failures on ppc
{% set tests_to_skip = tests_to_skip + " or test_safe_cast_from_float_with_nans_to_int" %} # [ppc64le]
# gandiva tests are segfaulting on ppc
{% set tests_to_skip = tests_to_skip + " or test_float_with_null_as_integer" %} # [ppc64le]
# "Unsupported backend 'nonexistent' specified in ARROW_DEFAULT_MEMORY_POOL"
{% set tests_to_skip = tests_to_skip + " or (test_memory and test_env_var)" %} # [unix]
# test is broken; header is in $PREFIX, not $SP_DIR
{% set tests_to_skip = tests_to_skip + " or (test_misc and test_get_include)" %} # [unix]
# flaky tests that fail occasionally
{% set tests_to_skip = tests_to_skip + " or test_total_bytes_allocated " %} # [linux]
{% set tests_to_skip = tests_to_skip + " or test_feather_format " %} # [linux]
# ^^^^^^^ TESTS THAT SHOULDN'T HAVE TO BE SKIPPED ^^^^^^^
- pytest -rfEs -k "not ({{ tests_to_skip }})"
{% endif %}
about:
home: http://github.com/apache/arrow
license: Apache-2.0
license_file:
- LICENSE.txt
summary: Python test files for Apache Arrow
about:
home: http://github.com/apache/arrow
license: Apache-2.0
license_file:
- LICENSE.txt
summary: C++ and Python libraries for Apache Arrow
extra:
recipe-maintainers:
- wesm
- xhochy
- leifwalsh
- jreback
- cpcloud
- pcmoritz
- robertnishihara
- siddharthteotia
- kou
- kszucs
- pitrou
- pearu
- nealrichardson
- jakirkham
- h-vetinari
feedstock-name: arrow-cpp