From 4f84a45b79ab0bd35e90ddc85bd5d901173762ad Mon Sep 17 00:00:00 2001
From: Ronald Holshausen
Date: Tue, 7 May 2024 15:29:14 +1000
Subject: [PATCH] chore: Remove non-Rust directories

---
.github/workflows/build-ffi.yml | 48 -
.github/workflows/compatability-suite.yml | 40 -
c/consumer-verification/Makefile | 832 ---
c/consumer-verification/Makefile.am | 2 -
c/consumer-verification/Makefile.in | 832 ---
c/consumer-verification/README.md | 95 -
c/consumer-verification/aclocal.m4 | 1132 ----
c/consumer-verification/compile | 347 --
c/consumer-verification/config.h | 95 -
c/consumer-verification/config.h.in | 94 -
c/consumer-verification/config.status | 1195 ----
c/consumer-verification/configure | 5469 -----
c/consumer-verification/configure.ac | 11 -
c/consumer-verification/depcomp | 791 ---
c/consumer-verification/install-sh | 508 --
c/consumer-verification/libcurl.m4 | 272 -
c/consumer-verification/missing | 215 -
c/consumer-verification/src/Makefile | 575 --
c/consumer-verification/src/Makefile.am | 4 -
c/consumer-verification/src/Makefile.in | 575 --
c/consumer-verification/src/main.c | 265 -
c/consumer-verification/src/simple_pact.json | 25 -
.../src/test_pact_with_bodies.json | 48 -
c/provider-verifcation/CMakeLists.txt | 12 -
c/provider-verifcation/src/main.c | 25 -
compatibility-suite/Cargo.lock | 4260 ------
compatibility-suite/Cargo.toml | 63 -
compatibility-suite/build.rs | 7 -
.../.github/workflows/triage.yml | 15 -
.../pact-compatibility-suite/.gitignore | 24 -
.../pact-compatibility-suite/LICENSE | 201 -
.../pact-compatibility-suite/README.md | 42 -
.../features/V1/http_consumer.feature | 228 -
.../features/V1/http_provider.feature | 225 -
.../features/V2/http_consumer.feature | 117 -
.../features/V2/http_provider.feature | 43 -
.../features/V3/generators.feature | 71 -
.../features/V3/http_consumer.feature | 25 -
.../features/V3/http_generators.feature | 53 -
.../features/V3/http_matching.feature | 45 -
.../features/V3/http_provider.feature | 39 -
.../features/V3/matching_rules.feature | 236 -
.../features/V3/message_consumer.feature | 78 -
.../features/V3/message_provider.feature | 147 -
.../features/V4/generators.feature | 47 -
.../features/V4/http_consumer.feature | 26 -
.../features/V4/http_provider.feature | 29 -
.../features/V4/matching_rules.feature | 157 -
.../features/V4/message_consumer.feature | 26 -
.../features/V4/message_provider.feature | 22 -
.../V4/synchronous_message_consumer.feature | 113 -
.../features/V4/v4.feature | 9 -
.../fixtures/3-level.json | 14 -
.../fixtures/arraycontains-matcher-v4.json | 78 -
.../fixtures/basic.json | 4 -
.../fixtures/basic2.json | 4 -
.../fixtures/basic3.json | 4 -
.../fixtures/boolean-generator.json | 5 -
.../fixtures/boolean-matcher-v3.json | 12 -
.../fixtures/contenttype-matcher-v3.json | 12 -
.../fixtures/date-generator.json | 5 -
.../fixtures/date-matcher-v3.json | 13 -
.../fixtures/datetime-generator.json | 5 -
.../fixtures/decimal-type-matcher-v3.json | 12 -
.../fixtures/eachkey-matcher-v4.json | 19 -
.../fixtures/eachvalue-matcher-v4.json | 19 -
.../fixtures/equality-matcher-reset-v3.json | 20 -
.../fixtures/form-post-body.xml | 5 -
.../fixtures/include-matcher-v3.json | 13 -
.../fixtures/integer-type-matcher-v3.json | 12 -
.../fixtures/kafka-body.xml | 7 -
.../fixtures/kafka-expected-body.xml | 7 -
.../fixtures/minmax-type-matcher-v3.json | 32 -
.../fixtures/mockserver-generator.json | 5 -
.../fixtures/multipart-body.xml | 18 -
.../fixtures/multipart2-body.xml | 18 -
.../fixtures/notempty-matcher-v4.json | 20 -
.../fixtures/notempty2-matcher-v4.json | 12 -
.../fixtures/null-matcher-v3.json | 12 -
.../fixtures/number-type-matcher-v3.json | 12 -
.../fixtures/pact-broker_c1.json | 279 -
.../fixtures/pact-broker_c2.json | 388 --
.../fixtures/providerstate-generator.json | 5 -
.../fixtures/randomdec-generator.json | 5 -
.../fixtures/randomhex-generator.json | 5 -
.../fixtures/randomint-generator.json | 5 -
.../fixtures/randomregex-generator.json | 5 -
.../fixtures/randomstr-generator.json | 5 -
.../pact-compatibility-suite/fixtures/rat.jpg | Bin 28058 -> 0 bytes
.../fixtures/regex-matcher-header-v2.json | 6 -
.../fixtures/regex-matcher-metadata.json | 13 -
.../fixtures/regex-matcher-path-v2.json | 6 -
.../fixtures/regex-matcher-query-v2.json | 6 -
.../fixtures/regex-matcher-v2.json | 6 -
.../fixtures/sample.pdf | Bin 7498 -> 0 bytes
.../fixtures/semver-matcher-v4.json | 12 -
.../fixtures/siren.json | 65 -
.../fixtures/siren2.json | 71 -
.../fixtures/siren3.json | 65 -
.../fixtures/spider.jpg | Bin 30922 -> 0 bytes
.../fixtures/statuscode-matcher-v4.json | 11 -
.../fixtures/text-body.xml | 5 -
.../fixtures/time-generator.json | 5 -
.../fixtures/type-matcher-v2.json | 5 -
.../uuid-generator-lower-case-hyphenated.json | 5 -
.../fixtures/uuid-generator-simple.json | 5 -
.../uuid-generator-upper-case-hyphenated.json | 5 -
.../fixtures/uuid-generator-urn.json | 5 -
.../fixtures/uuid-generator.json | 5 -
.../fixtures/values-matcher-v3.json | 20 -
.../fixtures/xml-body.xml | 8 -
.../fixtures/xml2-body.xml | 8 -
.../tests/shared_steps/consumer.rs | 686 ---
compatibility-suite/tests/shared_steps/mod.rs | 344 --
.../tests/shared_steps/provider.rs | 933 ---
compatibility-suite/tests/v1_consumer.rs | 26 -
compatibility-suite/tests/v1_provider.rs | 36 -
compatibility-suite/tests/v2_consumer.rs | 26 -
compatibility-suite/tests/v2_provider.rs | 36 -
compatibility-suite/tests/v3.rs | 24 -
compatibility-suite/tests/v3_message.rs | 96 -
compatibility-suite/tests/v3_provider.rs | 40 -
.../tests/v3_steps/generators.rs | 329 -
.../tests/v3_steps/http_consumer.rs | 151 -
.../tests/v3_steps/http_matching.rs | 204 -
compatibility-suite/tests/v3_steps/message.rs | 913 ---
compatibility-suite/tests/v3_steps/mod.rs | 60 -
compatibility-suite/tests/v4.rs | 89 -
.../tests/v4_steps/generators.rs | 155 -
.../tests/v4_steps/http_consumer.rs | 29 -
.../tests/v4_steps/http_matching.rs | 220 -
.../tests/v4_steps/http_provider.rs | 309 -
.../tests/v4_steps/message_consumer.rs | 26 -
.../tests/v4_steps/message_provider.rs | 179 -
compatibility-suite/tests/v4_steps/mod.rs | 172 -
.../tests/v4_steps/sync_message_consumer.rs | 607 --
javascript/README.md | 18 -
javascript/lib/simple_pact.js | 90 -
javascript/lib/simple_pact_error.js | 167 -
javascript/package.json | 17 -
javascript/yarn.lock | 51 -
php/README.md | 15 -
php/composer.json | 28 -
...tor-consumer-area-calculator-provider.json | 102 -
...consumer-php-area-calculator-provider.json | 102 -
php/pacts/http-consumer-1-http-provider.json | 151 -
php/pacts/http-consumer-2-http-provider.json | 39 -
.../message-consumer-2-message-provider.json | 46 -
php/public/index.php | 34 -
php/public/proxy.php | 52 -
php/src/consumer-1.php | 120 -
php/src/consumer-2.php | 86 -
php/src/consumer-plugin.php | 77 -
php/src/provider.php | 113 -
proto/area_calculator.proto | 53 -
python/.gitignore | 1 -
python/Makefile | 33 -
python/hello_ffi.py | 7 -
python/pact_http_create_mock_server.py | 94 -
.../pact_http_create_mock_server_for_pact.py | 132 -
python/pact_message_v3.py | 85 -
python/pact_plugin_grpc_v4.py | 78 -
python/pact_xml.py | 104 -
python/pacts/Consumer-Alice Service.json | 38 -
...sumer-python-area-calculator-provider.json | 102 -
.../pacts/http-consumer-1-http-provider.json | 154 -
.../pacts/http-consumer-2-http-provider.json | 42 -
.../message-consumer-2-message-provider.json | 46 -
python/register_ffi.py | 72 -
python/requires.txt | 2 -
ruby/example_consumer_spec/.rspec | 3 -
ruby/example_consumer_spec/.ruby-version | 1 -
ruby/example_consumer_spec/Gemfile | 7 -
ruby/example_consumer_spec/Gemfile.lock | 45 -
ruby/example_consumer_spec/Rakefile | 5 -
.../spec/simple_consumer_spec.rb | 152 -
.../example_consumer_spec/spec/spec_helper.rb | 14 -
ruby/pact_mockserver_mk2/.rspec | 3 -
ruby/pact_mockserver_mk2/.ruby-version | 1 -
ruby/pact_mockserver_mk2/.travis.yml | 5 -
ruby/pact_mockserver_mk2/CODE_OF_CONDUCT.md | 74 -
ruby/pact_mockserver_mk2/Cargo.lock | 730 ---
ruby/pact_mockserver_mk2/Cargo.toml | 16 -
ruby/pact_mockserver_mk2/Gemfile | 6 -
ruby/pact_mockserver_mk2/Gemfile.lock | 42 -
ruby/pact_mockserver_mk2/LICENSE.txt | 21 -
ruby/pact_mockserver_mk2/README.md | 43 -
ruby/pact_mockserver_mk2/Rakefile | 10 -
ruby/pact_mockserver_mk2/bin/console | 14 -
ruby/pact_mockserver_mk2/bin/setup | 8 -
.../lib/pact/mockserver/mk2.rb | 9 -
.../lib/pact/mockserver/mk2/version.rb | 7 -
.../lib/pact_mockserver_mk2.rb | 10 -
.../pact_mockserver_mk2.gemspec | 37 -
.../spec/pact/pact_mockserver_mk2_spec.rb | 7 -
ruby/pact_mockserver_mk2/spec/spec_helper.rb | 14 -
ruby/pact_mockserver_mk2/src/lib.rs | 60 -
197 files changed, 30673 deletions(-)
delete mode 100644 .github/workflows/build-ffi.yml
delete mode 100644 .github/workflows/compatability-suite.yml
delete mode 100644 c/consumer-verification/Makefile
delete mode 100644 c/consumer-verification/Makefile.am
delete mode 100644 c/consumer-verification/Makefile.in
delete mode 100755 c/consumer-verification/README.md
delete mode 100644 c/consumer-verification/aclocal.m4
delete mode 100755 c/consumer-verification/compile
delete mode 100644 c/consumer-verification/config.h
delete mode 100644 c/consumer-verification/config.h.in
delete mode 100755 c/consumer-verification/config.status
delete mode 100755 c/consumer-verification/configure
delete mode 100644 c/consumer-verification/configure.ac
delete mode 100755 c/consumer-verification/depcomp
delete mode 100755 c/consumer-verification/install-sh
delete mode 100644 c/consumer-verification/libcurl.m4
delete mode 100755 c/consumer-verification/missing
delete mode 100644 c/consumer-verification/src/Makefile
delete mode 100644 c/consumer-verification/src/Makefile.am
delete mode 100644 c/consumer-verification/src/Makefile.in
delete mode 100644 c/consumer-verification/src/main.c
delete mode 100644 c/consumer-verification/src/simple_pact.json
delete mode 100644 c/consumer-verification/src/test_pact_with_bodies.json
delete mode 100644 c/provider-verifcation/CMakeLists.txt
delete mode 100644 c/provider-verifcation/src/main.c
delete mode 100644 compatibility-suite/Cargo.lock
delete mode 100644 compatibility-suite/Cargo.toml
delete mode 100644 compatibility-suite/build.rs
delete mode 100644 compatibility-suite/pact-compatibility-suite/.github/workflows/triage.yml
delete mode 100644 compatibility-suite/pact-compatibility-suite/.gitignore
delete mode 100644 compatibility-suite/pact-compatibility-suite/LICENSE
delete mode 100644 compatibility-suite/pact-compatibility-suite/README.md
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V1/http_consumer.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V1/http_provider.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V2/http_consumer.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V2/http_provider.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V3/generators.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V3/http_consumer.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V3/http_generators.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V3/http_matching.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V3/http_provider.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V3/matching_rules.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V3/message_consumer.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V3/message_provider.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V4/generators.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V4/http_consumer.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V4/http_provider.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V4/matching_rules.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V4/message_consumer.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V4/message_provider.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V4/synchronous_message_consumer.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/features/V4/v4.feature
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/3-level.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/arraycontains-matcher-v4.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/basic.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/basic2.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/basic3.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/boolean-generator.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/boolean-matcher-v3.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/contenttype-matcher-v3.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/date-generator.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/date-matcher-v3.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/datetime-generator.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/decimal-type-matcher-v3.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/eachkey-matcher-v4.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/eachvalue-matcher-v4.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/equality-matcher-reset-v3.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/form-post-body.xml
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/include-matcher-v3.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/integer-type-matcher-v3.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/kafka-body.xml
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/kafka-expected-body.xml
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/minmax-type-matcher-v3.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/mockserver-generator.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/multipart-body.xml
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/multipart2-body.xml
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/notempty-matcher-v4.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/notempty2-matcher-v4.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/null-matcher-v3.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/number-type-matcher-v3.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/pact-broker_c1.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/pact-broker_c2.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/providerstate-generator.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/randomdec-generator.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/randomhex-generator.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/randomint-generator.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/randomregex-generator.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/randomstr-generator.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/rat.jpg
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-header-v2.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-metadata.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-path-v2.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-query-v2.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-v2.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/sample.pdf
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/semver-matcher-v4.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/siren.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/siren2.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/siren3.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/spider.jpg
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/statuscode-matcher-v4.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/text-body.xml
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/time-generator.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/type-matcher-v2.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-lower-case-hyphenated.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-simple.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-upper-case-hyphenated.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-urn.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/values-matcher-v3.json
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/xml-body.xml
delete mode 100644 compatibility-suite/pact-compatibility-suite/fixtures/xml2-body.xml
delete mode 100644 compatibility-suite/tests/shared_steps/consumer.rs
delete mode 100644 compatibility-suite/tests/shared_steps/mod.rs
delete mode 100644 compatibility-suite/tests/shared_steps/provider.rs
delete mode 100644 compatibility-suite/tests/v1_consumer.rs
delete mode 100644 compatibility-suite/tests/v1_provider.rs
delete mode 100644 compatibility-suite/tests/v2_consumer.rs
delete mode 100644 compatibility-suite/tests/v2_provider.rs
delete mode 100644 compatibility-suite/tests/v3.rs
delete mode 100644 compatibility-suite/tests/v3_message.rs
delete mode 100644 compatibility-suite/tests/v3_provider.rs
delete mode 100644 compatibility-suite/tests/v3_steps/generators.rs
delete mode 100644 compatibility-suite/tests/v3_steps/http_consumer.rs
delete mode 100644 compatibility-suite/tests/v3_steps/http_matching.rs
delete mode 100644 compatibility-suite/tests/v3_steps/message.rs
delete mode 100644 compatibility-suite/tests/v3_steps/mod.rs
delete mode 100644 compatibility-suite/tests/v4.rs
delete mode 100644 compatibility-suite/tests/v4_steps/generators.rs
delete mode 100644 compatibility-suite/tests/v4_steps/http_consumer.rs
delete mode 100644 compatibility-suite/tests/v4_steps/http_matching.rs
delete mode 100644 compatibility-suite/tests/v4_steps/http_provider.rs
delete mode 100644 compatibility-suite/tests/v4_steps/message_consumer.rs
delete mode 100644 compatibility-suite/tests/v4_steps/message_provider.rs
delete mode 100644 compatibility-suite/tests/v4_steps/mod.rs
delete mode 100644 compatibility-suite/tests/v4_steps/sync_message_consumer.rs
delete mode 100644 javascript/README.md
delete mode 100644 javascript/lib/simple_pact.js
delete mode 100644 javascript/lib/simple_pact_error.js
delete mode 100644 javascript/package.json
delete mode 100644 javascript/yarn.lock
delete mode 100644 php/README.md
delete mode 100644 php/composer.json
delete mode 100644 php/pacts/area-calculator-consumer-area-calculator-provider.json
delete mode 100644 php/pacts/grpc-consumer-php-area-calculator-provider.json
delete mode 100644 php/pacts/http-consumer-1-http-provider.json
delete mode 100644 php/pacts/http-consumer-2-http-provider.json
delete mode 100644 php/pacts/message-consumer-2-message-provider.json
delete mode 100644 php/public/index.php
delete mode 100644 php/public/proxy.php
delete mode 100644 php/src/consumer-1.php
delete mode 100644 php/src/consumer-2.php
delete mode 100644 php/src/consumer-plugin.php
delete mode 100644 php/src/provider.php
delete mode 100644 proto/area_calculator.proto
delete mode 100644 python/.gitignore
delete mode 100644 python/Makefile
delete mode 100644 python/hello_ffi.py
delete mode 100644 python/pact_http_create_mock_server.py
delete mode 100644 python/pact_http_create_mock_server_for_pact.py
delete mode 100644 python/pact_message_v3.py
delete mode 100644 python/pact_plugin_grpc_v4.py
delete mode 100644 python/pact_xml.py
delete
mode 100644 python/pacts/Consumer-Alice Service.json delete mode 100644 python/pacts/grpc-consumer-python-area-calculator-provider.json delete mode 100644 python/pacts/http-consumer-1-http-provider.json delete mode 100644 python/pacts/http-consumer-2-http-provider.json delete mode 100644 python/pacts/message-consumer-2-message-provider.json delete mode 100644 python/register_ffi.py delete mode 100644 python/requires.txt delete mode 100644 ruby/example_consumer_spec/.rspec delete mode 100644 ruby/example_consumer_spec/.ruby-version delete mode 100644 ruby/example_consumer_spec/Gemfile delete mode 100644 ruby/example_consumer_spec/Gemfile.lock delete mode 100644 ruby/example_consumer_spec/Rakefile delete mode 100644 ruby/example_consumer_spec/spec/simple_consumer_spec.rb delete mode 100644 ruby/example_consumer_spec/spec/spec_helper.rb delete mode 100644 ruby/pact_mockserver_mk2/.rspec delete mode 100644 ruby/pact_mockserver_mk2/.ruby-version delete mode 100644 ruby/pact_mockserver_mk2/.travis.yml delete mode 100644 ruby/pact_mockserver_mk2/CODE_OF_CONDUCT.md delete mode 100644 ruby/pact_mockserver_mk2/Cargo.lock delete mode 100644 ruby/pact_mockserver_mk2/Cargo.toml delete mode 100644 ruby/pact_mockserver_mk2/Gemfile delete mode 100644 ruby/pact_mockserver_mk2/Gemfile.lock delete mode 100644 ruby/pact_mockserver_mk2/LICENSE.txt delete mode 100644 ruby/pact_mockserver_mk2/README.md delete mode 100644 ruby/pact_mockserver_mk2/Rakefile delete mode 100755 ruby/pact_mockserver_mk2/bin/console delete mode 100755 ruby/pact_mockserver_mk2/bin/setup delete mode 100644 ruby/pact_mockserver_mk2/lib/pact/mockserver/mk2.rb delete mode 100644 ruby/pact_mockserver_mk2/lib/pact/mockserver/mk2/version.rb delete mode 100644 ruby/pact_mockserver_mk2/lib/pact_mockserver_mk2.rb delete mode 100644 ruby/pact_mockserver_mk2/pact_mockserver_mk2.gemspec delete mode 100644 ruby/pact_mockserver_mk2/spec/pact/pact_mockserver_mk2_spec.rb delete mode 100644 ruby/pact_mockserver_mk2/spec/spec_helper.rb delete mode 100644 ruby/pact_mockserver_mk2/src/lib.rs diff --git a/.github/workflows/build-ffi.yml b/.github/workflows/build-ffi.yml deleted file mode 100644 index 3afe9d000..000000000 --- a/.github/workflows/build-ffi.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: Pact-Rust FFI Build - -on: [push, pull_request] - -jobs: - build: - runs-on: ${{ matrix.operating-system }} - strategy: - fail-fast: false - matrix: - operating-system: [ ubuntu-latest, windows-latest, macos-12 ] - rust: [ stable ] - env: - pact_do_not_track: true - steps: - - uses: actions/checkout@v3 - - run: rustc --version || true - shell: bash - - uses: dtolnay/rust-toolchain@stable - with: - toolchain: stable - - name: Install shared mime info DB - if: runner.os == 'macOS' - run: brew install shared-mime-info - - uses: dtolnay/rust-toolchain@nightly - with: - toolchain: nightly - components: rustfmt - - name: Install doxygen - if: runner.os == 'Linux' - run: sudo apt-get install -y doxygen - - name: Build pact_ffi with CMake - run: ./ci-build.sh - working-directory: rust/pact_ffi - shell: bash - - name: Run the C FFI tests - if: runner.os == 'Linux' - working-directory: c/consumer-verification - run: | - sudo apt update - sudo apt install libcurl4-openssl-dev - /usr/bin/aclocal - autoconf - automake --add-missing - ./configure - make - src/consumer-verification basic ../../rust/target/debug/libpact_ffi.so - src/consumer-verification error ../../rust/target/debug/libpact_ffi.so diff --git a/.github/workflows/compatability-suite.yml 
b/.github/workflows/compatability-suite.yml deleted file mode 100644 index 75b6b632b..000000000 --- a/.github/workflows/compatability-suite.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: Pact-Rust Compatibility Suite - -on: [push, pull_request] - -env: - pact_do_not_track: true - -jobs: - v1: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: dtolnay/rust-toolchain@stable - - name: Run Cucumber - run: cargo test --test v1* - working-directory: compatibility-suite - v2: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: dtolnay/rust-toolchain@stable - - name: Run Cucumber - run: cargo test --test v2* - working-directory: compatibility-suite - v3: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: dtolnay/rust-toolchain@stable - - name: Run Cucumber - run: cargo test --test v3* - working-directory: compatibility-suite - v4: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: dtolnay/rust-toolchain@stable - - name: Run Cucumber - run: cargo test --test v4* - working-directory: compatibility-suite diff --git a/c/consumer-verification/Makefile b/c/consumer-verification/Makefile deleted file mode 100644 index 3e98cea7b..000000000 --- a/c/consumer-verification/Makefile +++ /dev/null @@ -1,832 +0,0 @@ -# Makefile.in generated by automake 1.16.3 from Makefile.am. -# Makefile. Generated from Makefile.in by configure. - -# Copyright (C) 1994-2020 Free Software Foundation, Inc. - -# This Makefile.in is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY, to the extent permitted by law; without -# even the implied warranty of MERCHANTABILITY or FITNESS FOR A -# PARTICULAR PURPOSE. - - - - -am__is_gnu_make = { \ - if test -z '$(MAKELEVEL)'; then \ - false; \ - elif test -n '$(MAKE_HOST)'; then \ - true; \ - elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ - true; \ - else \ - false; \ - fi; \ -} -am__make_running_with_option = \ - case $${target_option-} in \ - ?) 
;; \ - *) echo "am__make_running_with_option: internal error: invalid" \ - "target option '$${target_option-}' specified" >&2; \ - exit 1;; \ - esac; \ - has_opt=no; \ - sane_makeflags=$$MAKEFLAGS; \ - if $(am__is_gnu_make); then \ - sane_makeflags=$$MFLAGS; \ - else \ - case $$MAKEFLAGS in \ - *\\[\ \ ]*) \ - bs=\\; \ - sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ - | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ - esac; \ - fi; \ - skip_next=no; \ - strip_trailopt () \ - { \ - flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ - }; \ - for flg in $$sane_makeflags; do \ - test $$skip_next = yes && { skip_next=no; continue; }; \ - case $$flg in \ - *=*|--*) continue;; \ - -*I) strip_trailopt 'I'; skip_next=yes;; \ - -*I?*) strip_trailopt 'I';; \ - -*O) strip_trailopt 'O'; skip_next=yes;; \ - -*O?*) strip_trailopt 'O';; \ - -*l) strip_trailopt 'l'; skip_next=yes;; \ - -*l?*) strip_trailopt 'l';; \ - -[dEDm]) skip_next=yes;; \ - -[JT]) skip_next=yes;; \ - esac; \ - case $$flg in \ - *$$target_option*) has_opt=yes; break;; \ - esac; \ - done; \ - test $$has_opt = yes -am__make_dryrun = (target_option=n; $(am__make_running_with_option)) -am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) -pkgdatadir = $(datadir)/consumer-verification -pkgincludedir = $(includedir)/consumer-verification -pkglibdir = $(libdir)/consumer-verification -pkglibexecdir = $(libexecdir)/consumer-verification -am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd -install_sh_DATA = $(install_sh) -c -m 644 -install_sh_PROGRAM = $(install_sh) -c -install_sh_SCRIPT = $(install_sh) -c -INSTALL_HEADER = $(INSTALL_DATA) -transform = $(program_transform_name) -NORMAL_INSTALL = : -PRE_INSTALL = : -POST_INSTALL = : -NORMAL_UNINSTALL = : -PRE_UNINSTALL = : -POST_UNINSTALL = : -subdir = . 
-ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/libcurl.m4 \ - $(top_srcdir)/configure.ac -am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ - $(ACLOCAL_M4) -DIST_COMMON = $(srcdir)/Makefile.am $(top_srcdir)/configure \ - $(am__configure_deps) $(dist_doc_DATA) $(am__DIST_COMMON) -am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ - configure.lineno config.status.lineno -mkinstalldirs = $(install_sh) -d -CONFIG_HEADER = config.h -CONFIG_CLEAN_FILES = -CONFIG_CLEAN_VPATH_FILES = -AM_V_P = $(am__v_P_$(V)) -am__v_P_ = $(am__v_P_$(AM_DEFAULT_VERBOSITY)) -am__v_P_0 = false -am__v_P_1 = : -AM_V_GEN = $(am__v_GEN_$(V)) -am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY)) -am__v_GEN_0 = @echo " GEN " $@; -am__v_GEN_1 = -AM_V_at = $(am__v_at_$(V)) -am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY)) -am__v_at_0 = @ -am__v_at_1 = -SOURCES = -DIST_SOURCES = -RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ - ctags-recursive dvi-recursive html-recursive info-recursive \ - install-data-recursive install-dvi-recursive \ - install-exec-recursive install-html-recursive \ - install-info-recursive install-pdf-recursive \ - install-ps-recursive install-recursive installcheck-recursive \ - installdirs-recursive pdf-recursive ps-recursive \ - tags-recursive uninstall-recursive -am__can_run_installinfo = \ - case $$AM_UPDATE_INFO_DIR in \ - n|no|NO) false;; \ - *) (install-info --version) >/dev/null 2>&1;; \ - esac -am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; -am__vpath_adj = case $$p in \ - $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ - *) f=$$p;; \ - esac; -am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; -am__install_max = 40 -am__nobase_strip_setup = \ - srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` -am__nobase_strip = \ - for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" -am__nobase_list = $(am__nobase_strip_setup); \ - for p in $$list; do echo "$$p $$p"; done | \ - sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ - $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ - if (++n[$$2] == $(am__install_max)) \ - { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ - END { for (dir in files) print dir, files[dir] }' -am__base_list = \ - sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ - sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' -am__uninstall_files_from_dir = { \ - test -z "$$files" \ - || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ - || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ - $(am__cd) "$$dir" && rm -f $$files; }; \ - } -am__installdirs = "$(DESTDIR)$(docdir)" -DATA = $(dist_doc_DATA) -RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ - distclean-recursive maintainer-clean-recursive -am__recursive_targets = \ - $(RECURSIVE_TARGETS) \ - $(RECURSIVE_CLEAN_TARGETS) \ - $(am__extra_recursive_targets) -AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ - cscope distdir distdir-am dist dist-all distcheck -am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) \ - config.h.in -# Read a list of newline-separated strings from the standard input, -# and print each of them once, without duplicates. Input order is -# *not* preserved. -am__uniquify_input = $(AWK) '\ - BEGIN { nonempty = 0; } \ - { items[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in items) print i; }; } \ -' -# Make sure the list of sources is unique. 
This is necessary because, -# e.g., the same source file might be shared among _SOURCES variables -# for different programs/libraries. -am__define_uniq_tagged_files = \ - list='$(am__tagged_files)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | $(am__uniquify_input)` -ETAGS = etags -CTAGS = ctags -CSCOPE = cscope -DIST_SUBDIRS = $(SUBDIRS) -am__DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/config.h.in compile \ - depcomp install-sh missing -DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) -distdir = $(PACKAGE)-$(VERSION) -top_distdir = $(distdir) -am__remove_distdir = \ - if test -d "$(distdir)"; then \ - find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \ - && rm -rf "$(distdir)" \ - || { sleep 5 && rm -rf "$(distdir)"; }; \ - else :; fi -am__post_remove_distdir = $(am__remove_distdir) -am__relativize = \ - dir0=`pwd`; \ - sed_first='s,^\([^/]*\)/.*$$,\1,'; \ - sed_rest='s,^[^/]*/*,,'; \ - sed_last='s,^.*/\([^/]*\)$$,\1,'; \ - sed_butlast='s,/*[^/]*$$,,'; \ - while test -n "$$dir1"; do \ - first=`echo "$$dir1" | sed -e "$$sed_first"`; \ - if test "$$first" != "."; then \ - if test "$$first" = ".."; then \ - dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ - dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ - else \ - first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ - if test "$$first2" = "$$first"; then \ - dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ - else \ - dir2="../$$dir2"; \ - fi; \ - dir0="$$dir0"/"$$first"; \ - fi; \ - fi; \ - dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ - done; \ - reldir="$$dir2" -DIST_ARCHIVES = $(distdir).tar.gz -GZIP_ENV = --best -DIST_TARGETS = dist-gzip -# Exists only to be overridden by the user if desired. -AM_DISTCHECK_DVI_TARGET = dvi -distuninstallcheck_listfiles = find . -type f -print -am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \ - | sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$' -distcleancheck_listfiles = find . 
-type f -print -ACLOCAL = ${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' aclocal-1.16 -AMTAR = $${TAR-tar} -AM_DEFAULT_VERBOSITY = 1 -AUTOCONF = ${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' autoconf -AUTOHEADER = ${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' autoheader -AUTOMAKE = ${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' automake-1.16 -AWK = gawk -CC = gcc -CCDEPMODE = depmode=gcc3 -CFLAGS = -g -O2 -CPPFLAGS = -CYGPATH_W = echo -DEFS = -DHAVE_CONFIG_H -DEPDIR = .deps -ECHO_C = -ECHO_N = -n -ECHO_T = -EXEEXT = -INSTALL = /usr/bin/install -c -INSTALL_DATA = ${INSTALL} -m 644 -INSTALL_PROGRAM = ${INSTALL} -INSTALL_SCRIPT = ${INSTALL} -INSTALL_STRIP_PROGRAM = $(install_sh) -c -s -LDFLAGS = -LIBCURL = -L/usr/local/lib -lcurl -LIBCURL_CPPFLAGS = -I/usr/local/include -LIBOBJS = -LIBS = -LTLIBOBJS = -MAKEINFO = ${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' makeinfo -MKDIR_P = /usr/bin/mkdir -p -OBJEXT = o -PACKAGE = consumer-verification -PACKAGE_BUGREPORT = bug-automake@gnu.org -PACKAGE_NAME = consumer-verification -PACKAGE_STRING = consumer-verification 0.0.0 -PACKAGE_TARNAME = consumer-verification -PACKAGE_URL = -PACKAGE_VERSION = 0.0.0 -PATH_SEPARATOR = : -SET_MAKE = -SHELL = /bin/bash -STRIP = -VERSION = 0.0.0 -_libcurl_config = -abs_builddir = /home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification -abs_srcdir = /home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification -abs_top_builddir = /home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification -abs_top_srcdir = /home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification -ac_ct_CC = gcc -am__include = include -am__leading_dot = . -am__quote = -am__tar = $${TAR-tar} chof - "$$tardir" -am__untar = $${TAR-tar} xf - -bindir = ${exec_prefix}/bin -build_alias = -builddir = . -datadir = ${datarootdir} -datarootdir = ${prefix}/share -docdir = ${datarootdir}/doc/${PACKAGE_TARNAME} -dvidir = ${docdir} -exec_prefix = ${prefix} -host_alias = -htmldir = ${docdir} -includedir = ${prefix}/include -infodir = ${datarootdir}/info -install_sh = ${SHELL} /home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/install-sh -libdir = ${exec_prefix}/lib -libexecdir = ${exec_prefix}/libexec -localedir = ${datarootdir}/locale -localstatedir = ${prefix}/var -mandir = ${datarootdir}/man -mkdir_p = $(MKDIR_P) -oldincludedir = /usr/include -pdfdir = ${docdir} -prefix = /usr/local -program_transform_name = s,x,x, -psdir = ${docdir} -runstatedir = ${localstatedir}/run -sbindir = ${exec_prefix}/sbin -sharedstatedir = ${prefix}/com -srcdir = . -sysconfdir = ${prefix}/etc -target_alias = -top_build_prefix = -top_builddir = . -top_srcdir = . 
-SUBDIRS = src -dist_doc_DATA = README.md -all: config.h - $(MAKE) $(AM_MAKEFLAGS) all-recursive - -.SUFFIXES: -am--refresh: Makefile - @: -$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) - @for dep in $?; do \ - case '$(am__configure_deps)' in \ - *$$dep*) \ - echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \ - $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \ - && exit 0; \ - exit 1;; \ - esac; \ - done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ - $(am__cd) $(top_srcdir) && \ - $(AUTOMAKE) --foreign Makefile -Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status - @case '$?' in \ - *config.status*) \ - echo ' $(SHELL) ./config.status'; \ - $(SHELL) ./config.status;; \ - *) \ - echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__maybe_remake_depfiles)'; \ - cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__maybe_remake_depfiles);; \ - esac; - -$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) - $(SHELL) ./config.status --recheck - -$(top_srcdir)/configure: $(am__configure_deps) - $(am__cd) $(srcdir) && $(AUTOCONF) -$(ACLOCAL_M4): $(am__aclocal_m4_deps) - $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) -$(am__aclocal_m4_deps): - -config.h: stamp-h1 - @test -f $@ || rm -f stamp-h1 - @test -f $@ || $(MAKE) $(AM_MAKEFLAGS) stamp-h1 - -stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status - @rm -f stamp-h1 - cd $(top_builddir) && $(SHELL) ./config.status config.h -$(srcdir)/config.h.in: $(am__configure_deps) - ($(am__cd) $(top_srcdir) && $(AUTOHEADER)) - rm -f stamp-h1 - touch $@ - -distclean-hdr: - -rm -f config.h stamp-h1 -install-dist_docDATA: $(dist_doc_DATA) - @$(NORMAL_INSTALL) - @list='$(dist_doc_DATA)'; test -n "$(docdir)" || list=; \ - if test -n "$$list"; then \ - echo " $(MKDIR_P) '$(DESTDIR)$(docdir)'"; \ - $(MKDIR_P) "$(DESTDIR)$(docdir)" || exit 1; \ - fi; \ - for p in $$list; do \ - if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - echo "$$d$$p"; \ - done | $(am__base_list) | \ - while read files; do \ - echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(docdir)'"; \ - $(INSTALL_DATA) $$files "$(DESTDIR)$(docdir)" || exit $$?; \ - done - -uninstall-dist_docDATA: - @$(NORMAL_UNINSTALL) - @list='$(dist_doc_DATA)'; test -n "$(docdir)" || list=; \ - files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ - dir='$(DESTDIR)$(docdir)'; $(am__uninstall_files_from_dir) - -# This directory's subdirectories are mostly independent; you can cd -# into them and run 'make' without going through this Makefile. -# To change the values of 'make' variables: instead of editing Makefiles, -# (1) if the variable is set in 'config.status', edit 'config.status' -# (which will cause the Makefiles to be regenerated when you run 'make'); -# (2) otherwise, pass the desired values on the 'make' command line. 
-$(am__recursive_targets): - @fail=; \ - if $(am__make_keepgoing); then \ - failcom='fail=yes'; \ - else \ - failcom='exit 1'; \ - fi; \ - dot_seen=no; \ - target=`echo $@ | sed s/-recursive//`; \ - case "$@" in \ - distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ - *) list='$(SUBDIRS)' ;; \ - esac; \ - for subdir in $$list; do \ - echo "Making $$target in $$subdir"; \ - if test "$$subdir" = "."; then \ - dot_seen=yes; \ - local_target="$$target-am"; \ - else \ - local_target="$$target"; \ - fi; \ - ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ - || eval $$failcom; \ - done; \ - if test "$$dot_seen" = "no"; then \ - $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ - fi; test -z "$$fail" - -ID: $(am__tagged_files) - $(am__define_uniq_tagged_files); mkid -fID $$unique -tags: tags-recursive -TAGS: tags - -tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) - set x; \ - here=`pwd`; \ - if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ - include_option=--etags-include; \ - empty_fix=.; \ - else \ - include_option=--include; \ - empty_fix=; \ - fi; \ - list='$(SUBDIRS)'; for subdir in $$list; do \ - if test "$$subdir" = .; then :; else \ - test ! -f $$subdir/TAGS || \ - set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ - fi; \ - done; \ - $(am__define_uniq_tagged_files); \ - shift; \ - if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ - test -n "$$unique" || unique=$$empty_fix; \ - if test $$# -gt 0; then \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - "$$@" $$unique; \ - else \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - $$unique; \ - fi; \ - fi -ctags: ctags-recursive - -CTAGS: ctags -ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) - $(am__define_uniq_tagged_files); \ - test -z "$(CTAGS_ARGS)$$unique" \ - || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ - $$unique - -GTAGS: - here=`$(am__cd) $(top_builddir) && pwd` \ - && $(am__cd) $(top_srcdir) \ - && gtags -i $(GTAGS_ARGS) "$$here" -cscope: cscope.files - test ! 
-s cscope.files \ - || $(CSCOPE) -b -q $(AM_CSCOPEFLAGS) $(CSCOPEFLAGS) -i cscope.files $(CSCOPE_ARGS) -clean-cscope: - -rm -f cscope.files -cscope.files: clean-cscope cscopelist -cscopelist: cscopelist-recursive - -cscopelist-am: $(am__tagged_files) - list='$(am__tagged_files)'; \ - case "$(srcdir)" in \ - [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ - *) sdir=$(subdir)/$(srcdir) ;; \ - esac; \ - for i in $$list; do \ - if test -f "$$i"; then \ - echo "$(subdir)/$$i"; \ - else \ - echo "$$sdir/$$i"; \ - fi; \ - done >> $(top_builddir)/cscope.files - -distclean-tags: - -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags - -rm -f cscope.out cscope.in.out cscope.po.out cscope.files - -distdir: $(BUILT_SOURCES) - $(MAKE) $(AM_MAKEFLAGS) distdir-am - -distdir-am: $(DISTFILES) - $(am__remove_distdir) - test -d "$(distdir)" || mkdir "$(distdir)" - @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - list='$(DISTFILES)'; \ - dist_files=`for file in $$list; do echo $$file; done | \ - sed -e "s|^$$srcdirstrip/||;t" \ - -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ - case $$dist_files in \ - */*) $(MKDIR_P) `echo "$$dist_files" | \ - sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ - sort -u` ;; \ - esac; \ - for file in $$dist_files; do \ - if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ - if test -d $$d/$$file; then \ - dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ - if test -d "$(distdir)/$$file"; then \ - find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ - fi; \ - if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ - find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ - fi; \ - cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ - else \ - test -f "$(distdir)/$$file" \ - || cp -p $$d/$$file "$(distdir)/$$file" \ - || exit 1; \ - fi; \ - done - @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ - if test "$$subdir" = .; then :; else \ - $(am__make_dryrun) \ - || test -d "$(distdir)/$$subdir" \ - || $(MKDIR_P) "$(distdir)/$$subdir" \ - || exit 1; \ - dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ - $(am__relativize); \ - new_distdir=$$reldir; \ - dir1=$$subdir; dir2="$(top_distdir)"; \ - $(am__relativize); \ - new_top_distdir=$$reldir; \ - echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ - echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ - ($(am__cd) $$subdir && \ - $(MAKE) $(AM_MAKEFLAGS) \ - top_distdir="$$new_top_distdir" \ - distdir="$$new_distdir" \ - am__remove_distdir=: \ - am__skip_length_check=: \ - am__skip_mode_fix=: \ - distdir) \ - || exit 1; \ - fi; \ - done - -test -n "$(am__skip_mode_fix)" \ - || find "$(distdir)" -type d ! -perm -755 \ - -exec chmod u+rwx,go+rx {} \; -o \ - ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ - ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ - ! -type d ! 
-perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ - || chmod -R a+r "$(distdir)" -dist-gzip: distdir - tardir=$(distdir) && $(am__tar) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).tar.gz - $(am__post_remove_distdir) - -dist-bzip2: distdir - tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2 - $(am__post_remove_distdir) - -dist-lzip: distdir - tardir=$(distdir) && $(am__tar) | lzip -c $${LZIP_OPT--9} >$(distdir).tar.lz - $(am__post_remove_distdir) - -dist-xz: distdir - tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz - $(am__post_remove_distdir) - -dist-zstd: distdir - tardir=$(distdir) && $(am__tar) | zstd -c $${ZSTD_CLEVEL-$${ZSTD_OPT--19}} >$(distdir).tar.zst - $(am__post_remove_distdir) - -dist-tarZ: distdir - @echo WARNING: "Support for distribution archives compressed with" \ - "legacy program 'compress' is deprecated." >&2 - @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 - tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z - $(am__post_remove_distdir) - -dist-shar: distdir - @echo WARNING: "Support for shar distribution archives is" \ - "deprecated." >&2 - @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 - shar $(distdir) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).shar.gz - $(am__post_remove_distdir) - -dist-zip: distdir - -rm -f $(distdir).zip - zip -rq $(distdir).zip $(distdir) - $(am__post_remove_distdir) - -dist dist-all: - $(MAKE) $(AM_MAKEFLAGS) $(DIST_TARGETS) am__post_remove_distdir='@:' - $(am__post_remove_distdir) - -# This target untars the dist file and tries a VPATH configuration. Then -# it guarantees that the distribution is self-contained by making another -# tarfile. -distcheck: dist - case '$(DIST_ARCHIVES)' in \ - *.tar.gz*) \ - eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).tar.gz | $(am__untar) ;;\ - *.tar.bz2*) \ - bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ - *.tar.lz*) \ - lzip -dc $(distdir).tar.lz | $(am__untar) ;;\ - *.tar.xz*) \ - xz -dc $(distdir).tar.xz | $(am__untar) ;;\ - *.tar.Z*) \ - uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ - *.shar.gz*) \ - eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).shar.gz | unshar ;;\ - *.zip*) \ - unzip $(distdir).zip ;;\ - *.tar.zst*) \ - zstd -dc $(distdir).tar.zst | $(am__untar) ;;\ - esac - chmod -R a-w $(distdir) - chmod u+w $(distdir) - mkdir $(distdir)/_build $(distdir)/_build/sub $(distdir)/_inst - chmod a-w $(distdir) - test -d $(distdir)/_build || exit 0; \ - dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ - && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ - && am__cwd=`pwd` \ - && $(am__cd) $(distdir)/_build/sub \ - && ../../configure \ - $(AM_DISTCHECK_CONFIGURE_FLAGS) \ - $(DISTCHECK_CONFIGURE_FLAGS) \ - --srcdir=../.. --prefix="$$dc_install_base" \ - && $(MAKE) $(AM_MAKEFLAGS) \ - && $(MAKE) $(AM_MAKEFLAGS) $(AM_DISTCHECK_DVI_TARGET) \ - && $(MAKE) $(AM_MAKEFLAGS) check \ - && $(MAKE) $(AM_MAKEFLAGS) install \ - && $(MAKE) $(AM_MAKEFLAGS) installcheck \ - && $(MAKE) $(AM_MAKEFLAGS) uninstall \ - && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ - distuninstallcheck \ - && chmod -R a-w "$$dc_install_base" \ - && ({ \ - (cd ../.. 
&& umask 077 && mkdir "$$dc_destdir") \ - && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ - && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ - && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ - distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ - } || { rm -rf "$$dc_destdir"; exit 1; }) \ - && rm -rf "$$dc_destdir" \ - && $(MAKE) $(AM_MAKEFLAGS) dist \ - && rm -rf $(DIST_ARCHIVES) \ - && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \ - && cd "$$am__cwd" \ - || exit 1 - $(am__post_remove_distdir) - @(echo "$(distdir) archives ready for distribution: "; \ - list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ - sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x' -distuninstallcheck: - @test -n '$(distuninstallcheck_dir)' || { \ - echo 'ERROR: trying to run $@ with an empty' \ - '$$(distuninstallcheck_dir)' >&2; \ - exit 1; \ - }; \ - $(am__cd) '$(distuninstallcheck_dir)' || { \ - echo 'ERROR: cannot chdir into $(distuninstallcheck_dir)' >&2; \ - exit 1; \ - }; \ - test `$(am__distuninstallcheck_listfiles) | wc -l` -eq 0 \ - || { echo "ERROR: files left after uninstall:" ; \ - if test -n "$(DESTDIR)"; then \ - echo " (check DESTDIR support)"; \ - fi ; \ - $(distuninstallcheck_listfiles) ; \ - exit 1; } >&2 -distcleancheck: distclean - @if test '$(srcdir)' = . ; then \ - echo "ERROR: distcleancheck can only run from a VPATH build" ; \ - exit 1 ; \ - fi - @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ - || { echo "ERROR: files left in build directory after distclean:" ; \ - $(distcleancheck_listfiles) ; \ - exit 1; } >&2 -check-am: all-am -check: check-recursive -all-am: Makefile $(DATA) config.h -installdirs: installdirs-recursive -installdirs-am: - for dir in "$(DESTDIR)$(docdir)"; do \ - test -z "$$dir" || $(MKDIR_P) "$$dir"; \ - done -install: install-recursive -install-exec: install-exec-recursive -install-data: install-data-recursive -uninstall: uninstall-recursive - -install-am: all-am - @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am - -installcheck: installcheck-recursive -install-strip: - if test -z '$(STRIP)'; then \ - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - install; \ - else \ - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ - fi -mostlyclean-generic: - -clean-generic: - -distclean-generic: - -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) - -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) - -maintainer-clean-generic: - @echo "This command is intended for maintainers to use" - @echo "it deletes files that may require special tools to rebuild." 
-clean: clean-recursive - -clean-am: clean-generic mostlyclean-am - -distclean: distclean-recursive - -rm -f $(am__CONFIG_DISTCLEAN_FILES) - -rm -f Makefile -distclean-am: clean-am distclean-generic distclean-hdr distclean-tags - -dvi: dvi-recursive - -dvi-am: - -html: html-recursive - -html-am: - -info: info-recursive - -info-am: - -install-data-am: install-dist_docDATA - -install-dvi: install-dvi-recursive - -install-dvi-am: - -install-exec-am: - -install-html: install-html-recursive - -install-html-am: - -install-info: install-info-recursive - -install-info-am: - -install-man: - -install-pdf: install-pdf-recursive - -install-pdf-am: - -install-ps: install-ps-recursive - -install-ps-am: - -installcheck-am: - -maintainer-clean: maintainer-clean-recursive - -rm -f $(am__CONFIG_DISTCLEAN_FILES) - -rm -rf $(top_srcdir)/autom4te.cache - -rm -f Makefile -maintainer-clean-am: distclean-am maintainer-clean-generic - -mostlyclean: mostlyclean-recursive - -mostlyclean-am: mostlyclean-generic - -pdf: pdf-recursive - -pdf-am: - -ps: ps-recursive - -ps-am: - -uninstall-am: uninstall-dist_docDATA - -.MAKE: $(am__recursive_targets) all install-am install-strip - -.PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am \ - am--refresh check check-am clean clean-cscope clean-generic \ - cscope cscopelist-am ctags ctags-am dist dist-all dist-bzip2 \ - dist-gzip dist-lzip dist-shar dist-tarZ dist-xz dist-zip \ - dist-zstd distcheck distclean distclean-generic distclean-hdr \ - distclean-tags distcleancheck distdir distuninstallcheck dvi \ - dvi-am html html-am info info-am install install-am \ - install-data install-data-am install-dist_docDATA install-dvi \ - install-dvi-am install-exec install-exec-am install-html \ - install-html-am install-info install-info-am install-man \ - install-pdf install-pdf-am install-ps install-ps-am \ - install-strip installcheck installcheck-am installdirs \ - installdirs-am maintainer-clean maintainer-clean-generic \ - mostlyclean mostlyclean-generic pdf pdf-am ps ps-am tags \ - tags-am uninstall uninstall-am uninstall-dist_docDATA - -.PRECIOUS: Makefile - - -# Tell versions [3.59,3.63) of GNU make to not export all variables. -# Otherwise a system limit (for SysV at least) may be exceeded. -.NOEXPORT: diff --git a/c/consumer-verification/Makefile.am b/c/consumer-verification/Makefile.am deleted file mode 100644 index c6231c2f1..000000000 --- a/c/consumer-verification/Makefile.am +++ /dev/null @@ -1,2 +0,0 @@ -SUBDIRS = src -dist_doc_DATA = README.md diff --git a/c/consumer-verification/Makefile.in b/c/consumer-verification/Makefile.in deleted file mode 100644 index fbb2ef9e3..000000000 --- a/c/consumer-verification/Makefile.in +++ /dev/null @@ -1,832 +0,0 @@ -# Makefile.in generated by automake 1.16.3 from Makefile.am. -# @configure_input@ - -# Copyright (C) 1994-2020 Free Software Foundation, Inc. - -# This Makefile.in is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY, to the extent permitted by law; without -# even the implied warranty of MERCHANTABILITY or FITNESS FOR A -# PARTICULAR PURPOSE. 
- -@SET_MAKE@ - -VPATH = @srcdir@ -am__is_gnu_make = { \ - if test -z '$(MAKELEVEL)'; then \ - false; \ - elif test -n '$(MAKE_HOST)'; then \ - true; \ - elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ - true; \ - else \ - false; \ - fi; \ -} -am__make_running_with_option = \ - case $${target_option-} in \ - ?) ;; \ - *) echo "am__make_running_with_option: internal error: invalid" \ - "target option '$${target_option-}' specified" >&2; \ - exit 1;; \ - esac; \ - has_opt=no; \ - sane_makeflags=$$MAKEFLAGS; \ - if $(am__is_gnu_make); then \ - sane_makeflags=$$MFLAGS; \ - else \ - case $$MAKEFLAGS in \ - *\\[\ \ ]*) \ - bs=\\; \ - sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ - | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ - esac; \ - fi; \ - skip_next=no; \ - strip_trailopt () \ - { \ - flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ - }; \ - for flg in $$sane_makeflags; do \ - test $$skip_next = yes && { skip_next=no; continue; }; \ - case $$flg in \ - *=*|--*) continue;; \ - -*I) strip_trailopt 'I'; skip_next=yes;; \ - -*I?*) strip_trailopt 'I';; \ - -*O) strip_trailopt 'O'; skip_next=yes;; \ - -*O?*) strip_trailopt 'O';; \ - -*l) strip_trailopt 'l'; skip_next=yes;; \ - -*l?*) strip_trailopt 'l';; \ - -[dEDm]) skip_next=yes;; \ - -[JT]) skip_next=yes;; \ - esac; \ - case $$flg in \ - *$$target_option*) has_opt=yes; break;; \ - esac; \ - done; \ - test $$has_opt = yes -am__make_dryrun = (target_option=n; $(am__make_running_with_option)) -am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) -pkgdatadir = $(datadir)/@PACKAGE@ -pkgincludedir = $(includedir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ -pkglibexecdir = $(libexecdir)/@PACKAGE@ -am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd -install_sh_DATA = $(install_sh) -c -m 644 -install_sh_PROGRAM = $(install_sh) -c -install_sh_SCRIPT = $(install_sh) -c -INSTALL_HEADER = $(INSTALL_DATA) -transform = $(program_transform_name) -NORMAL_INSTALL = : -PRE_INSTALL = : -POST_INSTALL = : -NORMAL_UNINSTALL = : -PRE_UNINSTALL = : -POST_UNINSTALL = : -subdir = . 
-ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/libcurl.m4 \ - $(top_srcdir)/configure.ac -am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ - $(ACLOCAL_M4) -DIST_COMMON = $(srcdir)/Makefile.am $(top_srcdir)/configure \ - $(am__configure_deps) $(dist_doc_DATA) $(am__DIST_COMMON) -am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ - configure.lineno config.status.lineno -mkinstalldirs = $(install_sh) -d -CONFIG_HEADER = config.h -CONFIG_CLEAN_FILES = -CONFIG_CLEAN_VPATH_FILES = -AM_V_P = $(am__v_P_@AM_V@) -am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) -am__v_P_0 = false -am__v_P_1 = : -AM_V_GEN = $(am__v_GEN_@AM_V@) -am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) -am__v_GEN_0 = @echo " GEN " $@; -am__v_GEN_1 = -AM_V_at = $(am__v_at_@AM_V@) -am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) -am__v_at_0 = @ -am__v_at_1 = -SOURCES = -DIST_SOURCES = -RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ - ctags-recursive dvi-recursive html-recursive info-recursive \ - install-data-recursive install-dvi-recursive \ - install-exec-recursive install-html-recursive \ - install-info-recursive install-pdf-recursive \ - install-ps-recursive install-recursive installcheck-recursive \ - installdirs-recursive pdf-recursive ps-recursive \ - tags-recursive uninstall-recursive -am__can_run_installinfo = \ - case $$AM_UPDATE_INFO_DIR in \ - n|no|NO) false;; \ - *) (install-info --version) >/dev/null 2>&1;; \ - esac -am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; -am__vpath_adj = case $$p in \ - $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ - *) f=$$p;; \ - esac; -am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; -am__install_max = 40 -am__nobase_strip_setup = \ - srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` -am__nobase_strip = \ - for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" -am__nobase_list = $(am__nobase_strip_setup); \ - for p in $$list; do echo "$$p $$p"; done | \ - sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ - $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ - if (++n[$$2] == $(am__install_max)) \ - { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ - END { for (dir in files) print dir, files[dir] }' -am__base_list = \ - sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ - sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' -am__uninstall_files_from_dir = { \ - test -z "$$files" \ - || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ - || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ - $(am__cd) "$$dir" && rm -f $$files; }; \ - } -am__installdirs = "$(DESTDIR)$(docdir)" -DATA = $(dist_doc_DATA) -RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ - distclean-recursive maintainer-clean-recursive -am__recursive_targets = \ - $(RECURSIVE_TARGETS) \ - $(RECURSIVE_CLEAN_TARGETS) \ - $(am__extra_recursive_targets) -AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ - cscope distdir distdir-am dist dist-all distcheck -am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) \ - config.h.in -# Read a list of newline-separated strings from the standard input, -# and print each of them once, without duplicates. Input order is -# *not* preserved. -am__uniquify_input = $(AWK) '\ - BEGIN { nonempty = 0; } \ - { items[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in items) print i; }; } \ -' -# Make sure the list of sources is unique. 
This is necessary because, -# e.g., the same source file might be shared among _SOURCES variables -# for different programs/libraries. -am__define_uniq_tagged_files = \ - list='$(am__tagged_files)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | $(am__uniquify_input)` -ETAGS = etags -CTAGS = ctags -CSCOPE = cscope -DIST_SUBDIRS = $(SUBDIRS) -am__DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/config.h.in compile \ - depcomp install-sh missing -DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) -distdir = $(PACKAGE)-$(VERSION) -top_distdir = $(distdir) -am__remove_distdir = \ - if test -d "$(distdir)"; then \ - find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \ - && rm -rf "$(distdir)" \ - || { sleep 5 && rm -rf "$(distdir)"; }; \ - else :; fi -am__post_remove_distdir = $(am__remove_distdir) -am__relativize = \ - dir0=`pwd`; \ - sed_first='s,^\([^/]*\)/.*$$,\1,'; \ - sed_rest='s,^[^/]*/*,,'; \ - sed_last='s,^.*/\([^/]*\)$$,\1,'; \ - sed_butlast='s,/*[^/]*$$,,'; \ - while test -n "$$dir1"; do \ - first=`echo "$$dir1" | sed -e "$$sed_first"`; \ - if test "$$first" != "."; then \ - if test "$$first" = ".."; then \ - dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ - dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ - else \ - first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ - if test "$$first2" = "$$first"; then \ - dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ - else \ - dir2="../$$dir2"; \ - fi; \ - dir0="$$dir0"/"$$first"; \ - fi; \ - fi; \ - dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ - done; \ - reldir="$$dir2" -DIST_ARCHIVES = $(distdir).tar.gz -GZIP_ENV = --best -DIST_TARGETS = dist-gzip -# Exists only to be overridden by the user if desired. -AM_DISTCHECK_DVI_TARGET = dvi -distuninstallcheck_listfiles = find . -type f -print -am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \ - | sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$' -distcleancheck_listfiles = find . 
-type f -print -ACLOCAL = @ACLOCAL@ -AMTAR = @AMTAR@ -AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ -AUTOCONF = @AUTOCONF@ -AUTOHEADER = @AUTOHEADER@ -AUTOMAKE = @AUTOMAKE@ -AWK = @AWK@ -CC = @CC@ -CCDEPMODE = @CCDEPMODE@ -CFLAGS = @CFLAGS@ -CPPFLAGS = @CPPFLAGS@ -CYGPATH_W = @CYGPATH_W@ -DEFS = @DEFS@ -DEPDIR = @DEPDIR@ -ECHO_C = @ECHO_C@ -ECHO_N = @ECHO_N@ -ECHO_T = @ECHO_T@ -EXEEXT = @EXEEXT@ -INSTALL = @INSTALL@ -INSTALL_DATA = @INSTALL_DATA@ -INSTALL_PROGRAM = @INSTALL_PROGRAM@ -INSTALL_SCRIPT = @INSTALL_SCRIPT@ -INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -LDFLAGS = @LDFLAGS@ -LIBCURL = @LIBCURL@ -LIBCURL_CPPFLAGS = @LIBCURL_CPPFLAGS@ -LIBOBJS = @LIBOBJS@ -LIBS = @LIBS@ -LTLIBOBJS = @LTLIBOBJS@ -MAKEINFO = @MAKEINFO@ -MKDIR_P = @MKDIR_P@ -OBJEXT = @OBJEXT@ -PACKAGE = @PACKAGE@ -PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ -PACKAGE_NAME = @PACKAGE_NAME@ -PACKAGE_STRING = @PACKAGE_STRING@ -PACKAGE_TARNAME = @PACKAGE_TARNAME@ -PACKAGE_URL = @PACKAGE_URL@ -PACKAGE_VERSION = @PACKAGE_VERSION@ -PATH_SEPARATOR = @PATH_SEPARATOR@ -SET_MAKE = @SET_MAKE@ -SHELL = @SHELL@ -STRIP = @STRIP@ -VERSION = @VERSION@ -_libcurl_config = @_libcurl_config@ -abs_builddir = @abs_builddir@ -abs_srcdir = @abs_srcdir@ -abs_top_builddir = @abs_top_builddir@ -abs_top_srcdir = @abs_top_srcdir@ -ac_ct_CC = @ac_ct_CC@ -am__include = @am__include@ -am__leading_dot = @am__leading_dot@ -am__quote = @am__quote@ -am__tar = @am__tar@ -am__untar = @am__untar@ -bindir = @bindir@ -build_alias = @build_alias@ -builddir = @builddir@ -datadir = @datadir@ -datarootdir = @datarootdir@ -docdir = @docdir@ -dvidir = @dvidir@ -exec_prefix = @exec_prefix@ -host_alias = @host_alias@ -htmldir = @htmldir@ -includedir = @includedir@ -infodir = @infodir@ -install_sh = @install_sh@ -libdir = @libdir@ -libexecdir = @libexecdir@ -localedir = @localedir@ -localstatedir = @localstatedir@ -mandir = @mandir@ -mkdir_p = @mkdir_p@ -oldincludedir = @oldincludedir@ -pdfdir = @pdfdir@ -prefix = @prefix@ -program_transform_name = @program_transform_name@ -psdir = @psdir@ -runstatedir = @runstatedir@ -sbindir = @sbindir@ -sharedstatedir = @sharedstatedir@ -srcdir = @srcdir@ -sysconfdir = @sysconfdir@ -target_alias = @target_alias@ -top_build_prefix = @top_build_prefix@ -top_builddir = @top_builddir@ -top_srcdir = @top_srcdir@ -SUBDIRS = src -dist_doc_DATA = README.md -all: config.h - $(MAKE) $(AM_MAKEFLAGS) all-recursive - -.SUFFIXES: -am--refresh: Makefile - @: -$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) - @for dep in $?; do \ - case '$(am__configure_deps)' in \ - *$$dep*) \ - echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \ - $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \ - && exit 0; \ - exit 1;; \ - esac; \ - done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ - $(am__cd) $(top_srcdir) && \ - $(AUTOMAKE) --foreign Makefile -Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status - @case '$?' 
in \ - *config.status*) \ - echo ' $(SHELL) ./config.status'; \ - $(SHELL) ./config.status;; \ - *) \ - echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__maybe_remake_depfiles)'; \ - cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__maybe_remake_depfiles);; \ - esac; - -$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) - $(SHELL) ./config.status --recheck - -$(top_srcdir)/configure: $(am__configure_deps) - $(am__cd) $(srcdir) && $(AUTOCONF) -$(ACLOCAL_M4): $(am__aclocal_m4_deps) - $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) -$(am__aclocal_m4_deps): - -config.h: stamp-h1 - @test -f $@ || rm -f stamp-h1 - @test -f $@ || $(MAKE) $(AM_MAKEFLAGS) stamp-h1 - -stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status - @rm -f stamp-h1 - cd $(top_builddir) && $(SHELL) ./config.status config.h -$(srcdir)/config.h.in: $(am__configure_deps) - ($(am__cd) $(top_srcdir) && $(AUTOHEADER)) - rm -f stamp-h1 - touch $@ - -distclean-hdr: - -rm -f config.h stamp-h1 -install-dist_docDATA: $(dist_doc_DATA) - @$(NORMAL_INSTALL) - @list='$(dist_doc_DATA)'; test -n "$(docdir)" || list=; \ - if test -n "$$list"; then \ - echo " $(MKDIR_P) '$(DESTDIR)$(docdir)'"; \ - $(MKDIR_P) "$(DESTDIR)$(docdir)" || exit 1; \ - fi; \ - for p in $$list; do \ - if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - echo "$$d$$p"; \ - done | $(am__base_list) | \ - while read files; do \ - echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(docdir)'"; \ - $(INSTALL_DATA) $$files "$(DESTDIR)$(docdir)" || exit $$?; \ - done - -uninstall-dist_docDATA: - @$(NORMAL_UNINSTALL) - @list='$(dist_doc_DATA)'; test -n "$(docdir)" || list=; \ - files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ - dir='$(DESTDIR)$(docdir)'; $(am__uninstall_files_from_dir) - -# This directory's subdirectories are mostly independent; you can cd -# into them and run 'make' without going through this Makefile. -# To change the values of 'make' variables: instead of editing Makefiles, -# (1) if the variable is set in 'config.status', edit 'config.status' -# (which will cause the Makefiles to be regenerated when you run 'make'); -# (2) otherwise, pass the desired values on the 'make' command line. -$(am__recursive_targets): - @fail=; \ - if $(am__make_keepgoing); then \ - failcom='fail=yes'; \ - else \ - failcom='exit 1'; \ - fi; \ - dot_seen=no; \ - target=`echo $@ | sed s/-recursive//`; \ - case "$@" in \ - distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ - *) list='$(SUBDIRS)' ;; \ - esac; \ - for subdir in $$list; do \ - echo "Making $$target in $$subdir"; \ - if test "$$subdir" = "."; then \ - dot_seen=yes; \ - local_target="$$target-am"; \ - else \ - local_target="$$target"; \ - fi; \ - ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ - || eval $$failcom; \ - done; \ - if test "$$dot_seen" = "no"; then \ - $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ - fi; test -z "$$fail" - -ID: $(am__tagged_files) - $(am__define_uniq_tagged_files); mkid -fID $$unique -tags: tags-recursive -TAGS: tags - -tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) - set x; \ - here=`pwd`; \ - if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ - include_option=--etags-include; \ - empty_fix=.; \ - else \ - include_option=--include; \ - empty_fix=; \ - fi; \ - list='$(SUBDIRS)'; for subdir in $$list; do \ - if test "$$subdir" = .; then :; else \ - test ! 
-f $$subdir/TAGS || \ - set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ - fi; \ - done; \ - $(am__define_uniq_tagged_files); \ - shift; \ - if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ - test -n "$$unique" || unique=$$empty_fix; \ - if test $$# -gt 0; then \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - "$$@" $$unique; \ - else \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - $$unique; \ - fi; \ - fi -ctags: ctags-recursive - -CTAGS: ctags -ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) - $(am__define_uniq_tagged_files); \ - test -z "$(CTAGS_ARGS)$$unique" \ - || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ - $$unique - -GTAGS: - here=`$(am__cd) $(top_builddir) && pwd` \ - && $(am__cd) $(top_srcdir) \ - && gtags -i $(GTAGS_ARGS) "$$here" -cscope: cscope.files - test ! -s cscope.files \ - || $(CSCOPE) -b -q $(AM_CSCOPEFLAGS) $(CSCOPEFLAGS) -i cscope.files $(CSCOPE_ARGS) -clean-cscope: - -rm -f cscope.files -cscope.files: clean-cscope cscopelist -cscopelist: cscopelist-recursive - -cscopelist-am: $(am__tagged_files) - list='$(am__tagged_files)'; \ - case "$(srcdir)" in \ - [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ - *) sdir=$(subdir)/$(srcdir) ;; \ - esac; \ - for i in $$list; do \ - if test -f "$$i"; then \ - echo "$(subdir)/$$i"; \ - else \ - echo "$$sdir/$$i"; \ - fi; \ - done >> $(top_builddir)/cscope.files - -distclean-tags: - -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags - -rm -f cscope.out cscope.in.out cscope.po.out cscope.files - -distdir: $(BUILT_SOURCES) - $(MAKE) $(AM_MAKEFLAGS) distdir-am - -distdir-am: $(DISTFILES) - $(am__remove_distdir) - test -d "$(distdir)" || mkdir "$(distdir)" - @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - list='$(DISTFILES)'; \ - dist_files=`for file in $$list; do echo $$file; done | \ - sed -e "s|^$$srcdirstrip/||;t" \ - -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ - case $$dist_files in \ - */*) $(MKDIR_P) `echo "$$dist_files" | \ - sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ - sort -u` ;; \ - esac; \ - for file in $$dist_files; do \ - if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ - if test -d $$d/$$file; then \ - dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ - if test -d "$(distdir)/$$file"; then \ - find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ - fi; \ - if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ - find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ - fi; \ - cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ - else \ - test -f "$(distdir)/$$file" \ - || cp -p $$d/$$file "$(distdir)/$$file" \ - || exit 1; \ - fi; \ - done - @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ - if test "$$subdir" = .; then :; else \ - $(am__make_dryrun) \ - || test -d "$(distdir)/$$subdir" \ - || $(MKDIR_P) "$(distdir)/$$subdir" \ - || exit 1; \ - dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ - $(am__relativize); \ - new_distdir=$$reldir; \ - dir1=$$subdir; dir2="$(top_distdir)"; \ - $(am__relativize); \ - new_top_distdir=$$reldir; \ - echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ - echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ - ($(am__cd) $$subdir && \ - $(MAKE) $(AM_MAKEFLAGS) \ - top_distdir="$$new_top_distdir" \ - distdir="$$new_distdir" \ - am__remove_distdir=: \ - am__skip_length_check=: \ - am__skip_mode_fix=: \ - distdir) \ - || exit 1; \ - fi; \ - done - -test -n "$(am__skip_mode_fix)" \ - || find "$(distdir)" -type d ! -perm -755 \ - -exec chmod u+rwx,go+rx {} \; -o \ - ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ - ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ - ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ - || chmod -R a+r "$(distdir)" -dist-gzip: distdir - tardir=$(distdir) && $(am__tar) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).tar.gz - $(am__post_remove_distdir) - -dist-bzip2: distdir - tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2 - $(am__post_remove_distdir) - -dist-lzip: distdir - tardir=$(distdir) && $(am__tar) | lzip -c $${LZIP_OPT--9} >$(distdir).tar.lz - $(am__post_remove_distdir) - -dist-xz: distdir - tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz - $(am__post_remove_distdir) - -dist-zstd: distdir - tardir=$(distdir) && $(am__tar) | zstd -c $${ZSTD_CLEVEL-$${ZSTD_OPT--19}} >$(distdir).tar.zst - $(am__post_remove_distdir) - -dist-tarZ: distdir - @echo WARNING: "Support for distribution archives compressed with" \ - "legacy program 'compress' is deprecated." >&2 - @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 - tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z - $(am__post_remove_distdir) - -dist-shar: distdir - @echo WARNING: "Support for shar distribution archives is" \ - "deprecated." >&2 - @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 - shar $(distdir) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).shar.gz - $(am__post_remove_distdir) - -dist-zip: distdir - -rm -f $(distdir).zip - zip -rq $(distdir).zip $(distdir) - $(am__post_remove_distdir) - -dist dist-all: - $(MAKE) $(AM_MAKEFLAGS) $(DIST_TARGETS) am__post_remove_distdir='@:' - $(am__post_remove_distdir) - -# This target untars the dist file and tries a VPATH configuration. Then -# it guarantees that the distribution is self-contained by making another -# tarfile. 
-distcheck: dist - case '$(DIST_ARCHIVES)' in \ - *.tar.gz*) \ - eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).tar.gz | $(am__untar) ;;\ - *.tar.bz2*) \ - bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ - *.tar.lz*) \ - lzip -dc $(distdir).tar.lz | $(am__untar) ;;\ - *.tar.xz*) \ - xz -dc $(distdir).tar.xz | $(am__untar) ;;\ - *.tar.Z*) \ - uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ - *.shar.gz*) \ - eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).shar.gz | unshar ;;\ - *.zip*) \ - unzip $(distdir).zip ;;\ - *.tar.zst*) \ - zstd -dc $(distdir).tar.zst | $(am__untar) ;;\ - esac - chmod -R a-w $(distdir) - chmod u+w $(distdir) - mkdir $(distdir)/_build $(distdir)/_build/sub $(distdir)/_inst - chmod a-w $(distdir) - test -d $(distdir)/_build || exit 0; \ - dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ - && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ - && am__cwd=`pwd` \ - && $(am__cd) $(distdir)/_build/sub \ - && ../../configure \ - $(AM_DISTCHECK_CONFIGURE_FLAGS) \ - $(DISTCHECK_CONFIGURE_FLAGS) \ - --srcdir=../.. --prefix="$$dc_install_base" \ - && $(MAKE) $(AM_MAKEFLAGS) \ - && $(MAKE) $(AM_MAKEFLAGS) $(AM_DISTCHECK_DVI_TARGET) \ - && $(MAKE) $(AM_MAKEFLAGS) check \ - && $(MAKE) $(AM_MAKEFLAGS) install \ - && $(MAKE) $(AM_MAKEFLAGS) installcheck \ - && $(MAKE) $(AM_MAKEFLAGS) uninstall \ - && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ - distuninstallcheck \ - && chmod -R a-w "$$dc_install_base" \ - && ({ \ - (cd ../.. && umask 077 && mkdir "$$dc_destdir") \ - && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ - && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ - && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ - distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ - } || { rm -rf "$$dc_destdir"; exit 1; }) \ - && rm -rf "$$dc_destdir" \ - && $(MAKE) $(AM_MAKEFLAGS) dist \ - && rm -rf $(DIST_ARCHIVES) \ - && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \ - && cd "$$am__cwd" \ - || exit 1 - $(am__post_remove_distdir) - @(echo "$(distdir) archives ready for distribution: "; \ - list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ - sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x' -distuninstallcheck: - @test -n '$(distuninstallcheck_dir)' || { \ - echo 'ERROR: trying to run $@ with an empty' \ - '$$(distuninstallcheck_dir)' >&2; \ - exit 1; \ - }; \ - $(am__cd) '$(distuninstallcheck_dir)' || { \ - echo 'ERROR: cannot chdir into $(distuninstallcheck_dir)' >&2; \ - exit 1; \ - }; \ - test `$(am__distuninstallcheck_listfiles) | wc -l` -eq 0 \ - || { echo "ERROR: files left after uninstall:" ; \ - if test -n "$(DESTDIR)"; then \ - echo " (check DESTDIR support)"; \ - fi ; \ - $(distuninstallcheck_listfiles) ; \ - exit 1; } >&2 -distcleancheck: distclean - @if test '$(srcdir)' = . 
; then \ - echo "ERROR: distcleancheck can only run from a VPATH build" ; \ - exit 1 ; \ - fi - @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ - || { echo "ERROR: files left in build directory after distclean:" ; \ - $(distcleancheck_listfiles) ; \ - exit 1; } >&2 -check-am: all-am -check: check-recursive -all-am: Makefile $(DATA) config.h -installdirs: installdirs-recursive -installdirs-am: - for dir in "$(DESTDIR)$(docdir)"; do \ - test -z "$$dir" || $(MKDIR_P) "$$dir"; \ - done -install: install-recursive -install-exec: install-exec-recursive -install-data: install-data-recursive -uninstall: uninstall-recursive - -install-am: all-am - @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am - -installcheck: installcheck-recursive -install-strip: - if test -z '$(STRIP)'; then \ - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - install; \ - else \ - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ - fi -mostlyclean-generic: - -clean-generic: - -distclean-generic: - -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) - -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) - -maintainer-clean-generic: - @echo "This command is intended for maintainers to use" - @echo "it deletes files that may require special tools to rebuild." -clean: clean-recursive - -clean-am: clean-generic mostlyclean-am - -distclean: distclean-recursive - -rm -f $(am__CONFIG_DISTCLEAN_FILES) - -rm -f Makefile -distclean-am: clean-am distclean-generic distclean-hdr distclean-tags - -dvi: dvi-recursive - -dvi-am: - -html: html-recursive - -html-am: - -info: info-recursive - -info-am: - -install-data-am: install-dist_docDATA - -install-dvi: install-dvi-recursive - -install-dvi-am: - -install-exec-am: - -install-html: install-html-recursive - -install-html-am: - -install-info: install-info-recursive - -install-info-am: - -install-man: - -install-pdf: install-pdf-recursive - -install-pdf-am: - -install-ps: install-ps-recursive - -install-ps-am: - -installcheck-am: - -maintainer-clean: maintainer-clean-recursive - -rm -f $(am__CONFIG_DISTCLEAN_FILES) - -rm -rf $(top_srcdir)/autom4te.cache - -rm -f Makefile -maintainer-clean-am: distclean-am maintainer-clean-generic - -mostlyclean: mostlyclean-recursive - -mostlyclean-am: mostlyclean-generic - -pdf: pdf-recursive - -pdf-am: - -ps: ps-recursive - -ps-am: - -uninstall-am: uninstall-dist_docDATA - -.MAKE: $(am__recursive_targets) all install-am install-strip - -.PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am \ - am--refresh check check-am clean clean-cscope clean-generic \ - cscope cscopelist-am ctags ctags-am dist dist-all dist-bzip2 \ - dist-gzip dist-lzip dist-shar dist-tarZ dist-xz dist-zip \ - dist-zstd distcheck distclean distclean-generic distclean-hdr \ - distclean-tags distcleancheck distdir distuninstallcheck dvi \ - dvi-am html html-am info info-am install install-am \ - install-data install-data-am install-dist_docDATA install-dvi \ - install-dvi-am install-exec install-exec-am install-html \ - install-html-am install-info install-info-am install-man \ - install-pdf install-pdf-am install-ps install-ps-am \ - install-strip installcheck installcheck-am installdirs \ - installdirs-am maintainer-clean maintainer-clean-generic \ - mostlyclean 
mostlyclean-generic pdf pdf-am ps ps-am tags \ - tags-am uninstall uninstall-am uninstall-dist_docDATA - -.PRECIOUS: Makefile - - -# Tell versions [3.59,3.63) of GNU make to not export all variables. -# Otherwise a system limit (for SysV at least) may be exceeded. -.NOEXPORT: diff --git a/c/consumer-verification/README.md b/c/consumer-verification/README.md deleted file mode 100755 index 8b4f834b7..000000000 --- a/c/consumer-verification/README.md +++ /dev/null @@ -1,95 +0,0 @@ -## Example use of the rust verification and mock server libraries from C. - -Before you can run the C examples, the pact ffi DLL and header needs to be built using `cargo build` and `cbindgen` -in the `rust/pact_ffi` directory. - -### Install libcurl - -The C example uses libcurl, so the development library needs to be installed. On Ubuntu, you can use apt to install it. - - $ sudo apt-get install libcurl4-openssl-dev - -### configure/make dance - -Next, for Linux and OSX, do the standard configure and make series of steps. - - $ aclocal - $ autoconf - $ automake --add-missing - $ ./configure - checking for a BSD-compatible install... /usr/bin/install -c - checking whether build environment is sane... yes - checking for a thread-safe mkdir -p... /bin/mkdir -p - checking for gawk... no - checking for mawk... mawk - checking whether make sets $(MAKE)... yes - checking whether make supports nested variables... yes - checking for gcc... gcc - checking whether the C compiler works... yes - checking for C compiler default output file name... a.out - checking for suffix of executables... - checking whether we are cross compiling... no - checking for suffix of object files... o - checking whether we are using the GNU C compiler... yes - checking whether gcc accepts -g... yes - checking for gcc option to accept ISO C89... none needed - checking whether gcc understands -c and -o together... yes - checking for style of include used by make... GNU - checking dependency style of gcc... gcc3 - checking for gawk... (cached) mawk - checking for curl-config... /usr/bin/curl-config - checking for the version of libcurl... 7.47.0 - checking whether libcurl is usable... yes - checking for curl_free... yes - checking that generated files are newer than configure... done - configure: creating ./config.status - config.status: creating Makefile - config.status: creating src/Makefile - config.status: creating config.h - config.status: config.h is unchanged - config.status: executing depfiles commands - - $ make - make all-recursive - make[1]: Entering directory '/home/ronald/Development/pact-reference/c/consumer-verification' - Making all in src - make[2]: Entering directory '/home/ronald/Development/pact-reference/c/consumer-verification/src' - gcc -DHAVE_CONFIG_H -I. -I.. -g -O2 -MT main.o -MD -MP -MF .deps/main.Tpo -c -o main.o main.c - mv -f .deps/main.Tpo .deps/main.Po - gcc -g -O2 -o consumer-verification main.o -L/usr/lib/x86_64-linux-gnu -lcurl -ldl - make[2]: Leaving directory '/home/ronald/Development/pact-reference/c/consumer-verification/src' - make[2]: Entering directory '/home/ronald/Development/pact-reference/c/consumer-verification' - make[2]: Leaving directory '/home/ronald/Development/pact-reference/c/consumer-verification' - make[1]: Leaving directory '/home/ronald/Development/pact-reference/c/consumer-verification' - -Now you have an executable `src/consumer-verification` that links to the pact_mock_server library. - -## Running the tests - -There are two tests. 
The basic test expects all requests to be verified, and the error test expects -validation errors. The src/consumer-verification executable takes 2 parameters: the test to run (basic or error) and the -paths to the shared libraries. - - $ src/consumer-verification basic ../../rust/target/debug/libpact_ffi.so - This is consumer-verification 0.0.0. - Running basic pact test - Mock server started on port 39263 - Executing request against http://localhost:39263/mallory?name=ron&status=good - * Trying 127.0.0.1... - * Connected to localhost (127.0.0.1) port 39263 (#0) - > GET /mallory?name=ron&status=good HTTP/1.1 - Host: localhost:39263 - Accept: */* - - < HTTP/1.1 200 OK - < Date: Mon, 18 Jul 2016 06:22:24 GMT - < Content-Type: text/html - < Access-Control-Allow-Origin: * - < Content-Length: 28 - < - * Connection #0 to host localhost left intact - "That is some good Mallory." - - OK: Mock server verified all requests, as expected - -On OSX, the shared object would be `libpact_ffi.dylib` and on windows `libpact_ffi.dll`. diff --git a/c/consumer-verification/aclocal.m4 b/c/consumer-verification/aclocal.m4 deleted file mode 100644 index 6715ee0ea..000000000 --- a/c/consumer-verification/aclocal.m4 +++ /dev/null @@ -1,1132 +0,0 @@ -# generated automatically by aclocal 1.16.3 -*- Autoconf -*- - -# Copyright (C) 1996-2020 Free Software Foundation, Inc. - -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY, to the extent permitted by law; without -# even the implied warranty of MERCHANTABILITY or FITNESS FOR A -# PARTICULAR PURPOSE. - -m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])]) -m4_ifndef([AC_AUTOCONF_VERSION], - [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl -m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.69],, -[m4_warning([this file was generated for autoconf 2.69. -You have another version of autoconf. It may work, but is not guaranteed to. -If you have problems, you may need to regenerate the build system entirely. -To do so, use the procedure documented by the package, typically 'autoreconf'.])]) - -# Copyright (C) 2002-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_AUTOMAKE_VERSION(VERSION) -# ---------------------------- -# Automake X.Y traces this macro to ensure aclocal.m4 has been -# generated from the m4 files accompanying Automake X.Y. -# (This private macro should not be called outside this file.) -AC_DEFUN([AM_AUTOMAKE_VERSION], -[am__api_version='1.16' -dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to -dnl require some minimum version. Point them to the right macro. -m4_if([$1], [1.16.3], [], - [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl -]) - -# _AM_AUTOCONF_VERSION(VERSION) -# ----------------------------- -# aclocal traces this macro to find the Autoconf version. -# This is a private macro too. Using m4_define simplifies -# the logic in aclocal, which can simply ignore this definition.
-m4_define([_AM_AUTOCONF_VERSION], []) - -# AM_SET_CURRENT_AUTOMAKE_VERSION -# ------------------------------- -# Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. -# This function is AC_REQUIREd by AM_INIT_AUTOMAKE. -AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], -[AM_AUTOMAKE_VERSION([1.16.3])dnl -m4_ifndef([AC_AUTOCONF_VERSION], - [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl -_AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) - -# AM_AUX_DIR_EXPAND -*- Autoconf -*- - -# Copyright (C) 2001-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets -# $ac_aux_dir to '$srcdir/foo'. In other projects, it is set to -# '$srcdir', '$srcdir/..', or '$srcdir/../..'. -# -# Of course, Automake must honor this variable whenever it calls a -# tool from the auxiliary directory. The problem is that $srcdir (and -# therefore $ac_aux_dir as well) can be either absolute or relative, -# depending on how configure is run. This is pretty annoying, since -# it makes $ac_aux_dir quite unusable in subdirectories: in the top -# source directory, any form will work fine, but in subdirectories a -# relative path needs to be adjusted first. -# -# $ac_aux_dir/missing -# fails when called from a subdirectory if $ac_aux_dir is relative -# $top_srcdir/$ac_aux_dir/missing -# fails if $ac_aux_dir is absolute, -# fails when called from a subdirectory in a VPATH build with -# a relative $ac_aux_dir -# -# The reason of the latter failure is that $top_srcdir and $ac_aux_dir -# are both prefixed by $srcdir. In an in-source build this is usually -# harmless because $srcdir is '.', but things will broke when you -# start a VPATH build or use an absolute $srcdir. -# -# So we could use something similar to $top_srcdir/$ac_aux_dir/missing, -# iff we strip the leading $srcdir from $ac_aux_dir. That would be: -# am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"` -# and then we would define $MISSING as -# MISSING="\${SHELL} $am_aux_dir/missing" -# This will work as long as MISSING is not called from configure, because -# unfortunately $(top_srcdir) has no meaning in configure. -# However there are other variables, like CC, which are often used in -# configure, and could therefore not use this "fixed" $ac_aux_dir. -# -# Another solution, used here, is to always expand $ac_aux_dir to an -# absolute PATH. The drawback is that using absolute paths prevent a -# configured tree to be moved without reconfiguration. - -AC_DEFUN([AM_AUX_DIR_EXPAND], -[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl -# Expand $ac_aux_dir to an absolute path. -am_aux_dir=`cd "$ac_aux_dir" && pwd` -]) - -# AM_CONDITIONAL -*- Autoconf -*- - -# Copyright (C) 1997-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_CONDITIONAL(NAME, SHELL-CONDITION) -# ------------------------------------- -# Define a conditional. 
-AC_DEFUN([AM_CONDITIONAL], -[AC_PREREQ([2.52])dnl - m4_if([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])], - [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl -AC_SUBST([$1_TRUE])dnl -AC_SUBST([$1_FALSE])dnl -_AM_SUBST_NOTMAKE([$1_TRUE])dnl -_AM_SUBST_NOTMAKE([$1_FALSE])dnl -m4_define([_AM_COND_VALUE_$1], [$2])dnl -if $2; then - $1_TRUE= - $1_FALSE='#' -else - $1_TRUE='#' - $1_FALSE= -fi -AC_CONFIG_COMMANDS_PRE( -[if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then - AC_MSG_ERROR([[conditional "$1" was never defined. -Usually this means the macro was only invoked conditionally.]]) -fi])]) - -# Copyright (C) 1999-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - - -# There are a few dirty hacks below to avoid letting 'AC_PROG_CC' be -# written in clear, in which case automake, when reading aclocal.m4, -# will think it sees a *use*, and therefore will trigger all it's -# C support machinery. Also note that it means that autoscan, seeing -# CC etc. in the Makefile, will ask for an AC_PROG_CC use... - - -# _AM_DEPENDENCIES(NAME) -# ---------------------- -# See how the compiler implements dependency checking. -# NAME is "CC", "CXX", "OBJC", "OBJCXX", "UPC", or "GJC". -# We try a few techniques and use that to set a single cache variable. -# -# We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was -# modified to invoke _AM_DEPENDENCIES(CC); we would have a circular -# dependency, and given that the user is not expected to run this macro, -# just rely on AC_PROG_CC. -AC_DEFUN([_AM_DEPENDENCIES], -[AC_REQUIRE([AM_SET_DEPDIR])dnl -AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl -AC_REQUIRE([AM_MAKE_INCLUDE])dnl -AC_REQUIRE([AM_DEP_TRACK])dnl - -m4_if([$1], [CC], [depcc="$CC" am_compiler_list=], - [$1], [CXX], [depcc="$CXX" am_compiler_list=], - [$1], [OBJC], [depcc="$OBJC" am_compiler_list='gcc3 gcc'], - [$1], [OBJCXX], [depcc="$OBJCXX" am_compiler_list='gcc3 gcc'], - [$1], [UPC], [depcc="$UPC" am_compiler_list=], - [$1], [GCJ], [depcc="$GCJ" am_compiler_list='gcc3 gcc'], - [depcc="$$1" am_compiler_list=]) - -AC_CACHE_CHECK([dependency style of $depcc], - [am_cv_$1_dependencies_compiler_type], -[if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then - # We make a subdir and do the tests there. Otherwise we can end up - # making bogus files that we don't know about and never remove. For - # instance it was reported that on HP-UX the gcc test will end up - # making a dummy file named 'D' -- because '-MD' means "put the output - # in D". - rm -rf conftest.dir - mkdir conftest.dir - # Copy depcomp to subdir because otherwise we won't find it if we're - # using a relative directory. - cp "$am_depcomp" conftest.dir - cd conftest.dir - # We will build objects and dependencies in a subdirectory because - # it helps to detect inapplicable dependency modes. For instance - # both Tru64's cc and ICC support -MD to output dependencies as a - # side effect of compilation, but ICC will put the dependencies in - # the current directory while Tru64 will put them in the object - # directory. 
- mkdir sub - - am_cv_$1_dependencies_compiler_type=none - if test "$am_compiler_list" = ""; then - am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp` - fi - am__universal=false - m4_case([$1], [CC], - [case " $depcc " in #( - *\ -arch\ *\ -arch\ *) am__universal=true ;; - esac], - [CXX], - [case " $depcc " in #( - *\ -arch\ *\ -arch\ *) am__universal=true ;; - esac]) - - for depmode in $am_compiler_list; do - # Setup a source with many dependencies, because some compilers - # like to wrap large dependency lists on column 80 (with \), and - # we should not choose a depcomp mode which is confused by this. - # - # We need to recreate these files for each test, as the compiler may - # overwrite some of them when testing with obscure command lines. - # This happens at least with the AIX C compiler. - : > sub/conftest.c - for i in 1 2 3 4 5 6; do - echo '#include "conftst'$i'.h"' >> sub/conftest.c - # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with - # Solaris 10 /bin/sh. - echo '/* dummy */' > sub/conftst$i.h - done - echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf - - # We check with '-c' and '-o' for the sake of the "dashmstdout" - # mode. It turns out that the SunPro C++ compiler does not properly - # handle '-M -o', and we need to detect this. Also, some Intel - # versions had trouble with output in subdirs. - am__obj=sub/conftest.${OBJEXT-o} - am__minus_obj="-o $am__obj" - case $depmode in - gcc) - # This depmode causes a compiler race in universal mode. - test "$am__universal" = false || continue - ;; - nosideeffect) - # After this tag, mechanisms are not by side-effect, so they'll - # only be used when explicitly requested. - if test "x$enable_dependency_tracking" = xyes; then - continue - else - break - fi - ;; - msvc7 | msvc7msys | msvisualcpp | msvcmsys) - # This compiler won't grok '-c -o', but also, the minuso test has - # not run yet. These depmodes are late enough in the game, and - # so weak that their functioning should not be impacted. - am__obj=conftest.${OBJEXT-o} - am__minus_obj= - ;; - none) break ;; - esac - if depmode=$depmode \ - source=sub/conftest.c object=$am__obj \ - depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ - $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ - >/dev/null 2>conftest.err && - grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && - grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && - grep $am__obj sub/conftest.Po > /dev/null 2>&1 && - ${MAKE-make} -s -f confmf > /dev/null 2>&1; then - # icc doesn't choke on unknown options, it will just issue warnings - # or remarks (even with -Werror). So we grep stderr for any message - # that says an option was ignored or not supported. - # When given -MP, icc 7.0 and 7.1 complain thusly: - # icc: Command line warning: ignoring option '-M'; no argument required - # The diagnosis changed in icc 8.0: - # icc: Command line remark: option '-MP' not supported - if (grep 'ignoring option' conftest.err || - grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else - am_cv_$1_dependencies_compiler_type=$depmode - break - fi - fi - done - - cd .. - rm -rf conftest.dir -else - am_cv_$1_dependencies_compiler_type=none -fi -]) -AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type]) -AM_CONDITIONAL([am__fastdep$1], [ - test "x$enable_dependency_tracking" != xno \ - && test "$am_cv_$1_dependencies_compiler_type" = gcc3]) -]) - - -# AM_SET_DEPDIR -# ------------- -# Choose a directory name for dependency files. 
-# This macro is AC_REQUIREd in _AM_DEPENDENCIES. -AC_DEFUN([AM_SET_DEPDIR], -[AC_REQUIRE([AM_SET_LEADING_DOT])dnl -AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl -]) - - -# AM_DEP_TRACK -# ------------ -AC_DEFUN([AM_DEP_TRACK], -[AC_ARG_ENABLE([dependency-tracking], [dnl -AS_HELP_STRING( - [--enable-dependency-tracking], - [do not reject slow dependency extractors]) -AS_HELP_STRING( - [--disable-dependency-tracking], - [speeds up one-time build])]) -if test "x$enable_dependency_tracking" != xno; then - am_depcomp="$ac_aux_dir/depcomp" - AMDEPBACKSLASH='\' - am__nodep='_no' -fi -AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno]) -AC_SUBST([AMDEPBACKSLASH])dnl -_AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl -AC_SUBST([am__nodep])dnl -_AM_SUBST_NOTMAKE([am__nodep])dnl -]) - -# Generate code to set up dependency tracking. -*- Autoconf -*- - -# Copyright (C) 1999-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# _AM_OUTPUT_DEPENDENCY_COMMANDS -# ------------------------------ -AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS], -[{ - # Older Autoconf quotes --file arguments for eval, but not when files - # are listed without --file. Let's play safe and only enable the eval - # if we detect the quoting. - # TODO: see whether this extra hack can be removed once we start - # requiring Autoconf 2.70 or later. - AS_CASE([$CONFIG_FILES], - [*\'*], [eval set x "$CONFIG_FILES"], - [*], [set x $CONFIG_FILES]) - shift - # Used to flag and report bootstrapping failures. - am_rc=0 - for am_mf - do - # Strip MF so we end up with the name of the file. - am_mf=`AS_ECHO(["$am_mf"]) | sed -e 's/:.*$//'` - # Check whether this is an Automake generated Makefile which includes - # dependency-tracking related rules and includes. - # Grep'ing the whole file directly is not great: AIX grep has a line - # limit of 2048, but all sed's we know have understand at least 4000. - sed -n 's,^am--depfiles:.*,X,p' "$am_mf" | grep X >/dev/null 2>&1 \ - || continue - am_dirpart=`AS_DIRNAME(["$am_mf"])` - am_filepart=`AS_BASENAME(["$am_mf"])` - AM_RUN_LOG([cd "$am_dirpart" \ - && sed -e '/# am--include-marker/d' "$am_filepart" \ - | $MAKE -f - am--depfiles]) || am_rc=$? - done - if test $am_rc -ne 0; then - AC_MSG_FAILURE([Something went wrong bootstrapping makefile fragments - for automatic dependency tracking. If GNU make was not used, consider - re-running the configure script with MAKE="gmake" (or whatever is - necessary). You can also try re-running configure with the - '--disable-dependency-tracking' option to at least be able to build - the package (albeit without support for automatic dependency tracking).]) - fi - AS_UNSET([am_dirpart]) - AS_UNSET([am_filepart]) - AS_UNSET([am_mf]) - AS_UNSET([am_rc]) - rm -f conftest-deps.mk -} -])# _AM_OUTPUT_DEPENDENCY_COMMANDS - - -# AM_OUTPUT_DEPENDENCY_COMMANDS -# ----------------------------- -# This macro should only be invoked once -- use via AC_REQUIRE. -# -# This code is only required when automatic dependency tracking is enabled. -# This creates each '.Po' and '.Plo' makefile fragment that we'll need in -# order to bootstrap the dependency handling code. -AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS], -[AC_CONFIG_COMMANDS([depfiles], - [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS], - [AMDEP_TRUE="$AMDEP_TRUE" MAKE="${MAKE-make}"])]) - -# Do all the work for Automake. 
-*- Autoconf -*- - -# Copyright (C) 1996-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# This macro actually does too much. Some checks are only needed if -# your package does certain things. But this isn't really a big deal. - -dnl Redefine AC_PROG_CC to automatically invoke _AM_PROG_CC_C_O. -m4_define([AC_PROG_CC], -m4_defn([AC_PROG_CC]) -[_AM_PROG_CC_C_O -]) - -# AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) -# AM_INIT_AUTOMAKE([OPTIONS]) -# ----------------------------------------------- -# The call with PACKAGE and VERSION arguments is the old style -# call (pre autoconf-2.50), which is being phased out. PACKAGE -# and VERSION should now be passed to AC_INIT and removed from -# the call to AM_INIT_AUTOMAKE. -# We support both call styles for the transition. After -# the next Automake release, Autoconf can make the AC_INIT -# arguments mandatory, and then we can depend on a new Autoconf -# release and drop the old call support. -AC_DEFUN([AM_INIT_AUTOMAKE], -[AC_PREREQ([2.65])dnl -dnl Autoconf wants to disallow AM_ names. We explicitly allow -dnl the ones we care about. -m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl -AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl -AC_REQUIRE([AC_PROG_INSTALL])dnl -if test "`cd $srcdir && pwd`" != "`pwd`"; then - # Use -I$(srcdir) only when $(srcdir) != ., so that make's output - # is not polluted with repeated "-I." - AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl - # test to see if srcdir already configured - if test -f $srcdir/config.status; then - AC_MSG_ERROR([source directory already configured; run "make distclean" there first]) - fi -fi - -# test whether we have cygpath -if test -z "$CYGPATH_W"; then - if (cygpath --version) >/dev/null 2>/dev/null; then - CYGPATH_W='cygpath -w' - else - CYGPATH_W=echo - fi -fi -AC_SUBST([CYGPATH_W]) - -# Define the identity of the package. -dnl Distinguish between old-style and new-style calls. -m4_ifval([$2], -[AC_DIAGNOSE([obsolete], - [$0: two- and three-arguments forms are deprecated.]) -m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl - AC_SUBST([PACKAGE], [$1])dnl - AC_SUBST([VERSION], [$2])], -[_AM_SET_OPTIONS([$1])dnl -dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT. -m4_if( - m4_ifdef([AC_PACKAGE_NAME], [ok]):m4_ifdef([AC_PACKAGE_VERSION], [ok]), - [ok:ok],, - [m4_fatal([AC_INIT should be called with package and version arguments])])dnl - AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl - AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl - -_AM_IF_OPTION([no-define],, -[AC_DEFINE_UNQUOTED([PACKAGE], ["$PACKAGE"], [Name of package]) - AC_DEFINE_UNQUOTED([VERSION], ["$VERSION"], [Version number of package])])dnl - -# Some tools Automake needs. -AC_REQUIRE([AM_SANITY_CHECK])dnl -AC_REQUIRE([AC_ARG_PROGRAM])dnl -AM_MISSING_PROG([ACLOCAL], [aclocal-${am__api_version}]) -AM_MISSING_PROG([AUTOCONF], [autoconf]) -AM_MISSING_PROG([AUTOMAKE], [automake-${am__api_version}]) -AM_MISSING_PROG([AUTOHEADER], [autoheader]) -AM_MISSING_PROG([MAKEINFO], [makeinfo]) -AC_REQUIRE([AM_PROG_INSTALL_SH])dnl -AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl -AC_REQUIRE([AC_PROG_MKDIR_P])dnl -# For better backward compatibility. To be removed once Automake 1.9.x -# dies out for good. For more background, see: -# -# -AC_SUBST([mkdir_p], ['$(MKDIR_P)']) -# We need awk for the "check" target (and possibly the TAP driver). 
The -# system "awk" is bad on some platforms. -AC_REQUIRE([AC_PROG_AWK])dnl -AC_REQUIRE([AC_PROG_MAKE_SET])dnl -AC_REQUIRE([AM_SET_LEADING_DOT])dnl -_AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])], - [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])], - [_AM_PROG_TAR([v7])])]) -_AM_IF_OPTION([no-dependencies],, -[AC_PROVIDE_IFELSE([AC_PROG_CC], - [_AM_DEPENDENCIES([CC])], - [m4_define([AC_PROG_CC], - m4_defn([AC_PROG_CC])[_AM_DEPENDENCIES([CC])])])dnl -AC_PROVIDE_IFELSE([AC_PROG_CXX], - [_AM_DEPENDENCIES([CXX])], - [m4_define([AC_PROG_CXX], - m4_defn([AC_PROG_CXX])[_AM_DEPENDENCIES([CXX])])])dnl -AC_PROVIDE_IFELSE([AC_PROG_OBJC], - [_AM_DEPENDENCIES([OBJC])], - [m4_define([AC_PROG_OBJC], - m4_defn([AC_PROG_OBJC])[_AM_DEPENDENCIES([OBJC])])])dnl -AC_PROVIDE_IFELSE([AC_PROG_OBJCXX], - [_AM_DEPENDENCIES([OBJCXX])], - [m4_define([AC_PROG_OBJCXX], - m4_defn([AC_PROG_OBJCXX])[_AM_DEPENDENCIES([OBJCXX])])])dnl -]) -AC_REQUIRE([AM_SILENT_RULES])dnl -dnl The testsuite driver may need to know about EXEEXT, so add the -dnl 'am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This -dnl macro is hooked onto _AC_COMPILER_EXEEXT early, see below. -AC_CONFIG_COMMANDS_PRE(dnl -[m4_provide_if([_AM_COMPILER_EXEEXT], - [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl - -# POSIX will say in a future version that running "rm -f" with no argument -# is OK; and we want to be able to make that assumption in our Makefile -# recipes. So use an aggressive probe to check that the usage we want is -# actually supported "in the wild" to an acceptable degree. -# See automake bug#10828. -# To make any issue more visible, cause the running configure to be aborted -# by default if the 'rm' program in use doesn't match our expectations; the -# user can still override this though. -if rm -f && rm -fr && rm -rf; then : OK; else - cat >&2 <<'END' -Oops! - -Your 'rm' program seems unable to run without file operands specified -on the command line, even when the '-f' option is present. This is contrary -to the behaviour of most rm programs out there, and not conforming with -the upcoming POSIX standard: - -Please tell bug-automake@gnu.org about your system, including the value -of your $PATH and any error possibly output before this message. This -can help us improve future automake versions. - -END - if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then - echo 'Configuration will proceed anyway, since you have set the' >&2 - echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2 - echo >&2 - else - cat >&2 <<'END' -Aborting the configuration process, to ensure you take notice of the issue. - -You can download and install GNU coreutils to get an 'rm' implementation -that behaves properly: . - -If you want to complete the configuration process using your problematic -'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM -to "yes", and re-run configure. - -END - AC_MSG_ERROR([Your 'rm' program is bad, sorry.]) - fi -fi -dnl The trailing newline in this macro's definition is deliberate, for -dnl backward compatibility and to allow trailing 'dnl'-style comments -dnl after the AM_INIT_AUTOMAKE invocation. See automake bug#16841. -]) - -dnl Hook into '_AC_COMPILER_EXEEXT' early to learn its expansion. Do not -dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further -dnl mangled by Autoconf and run in a shell conditional statement. 
-m4_define([_AC_COMPILER_EXEEXT], -m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])]) - -# When config.status generates a header, we must update the stamp-h file. -# This file resides in the same directory as the config header -# that is generated. The stamp files are numbered to have different names. - -# Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the -# loop where config.status creates the headers, so we can generate -# our stamp files there. -AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK], -[# Compute $1's index in $config_headers. -_am_arg=$1 -_am_stamp_count=1 -for _am_header in $config_headers :; do - case $_am_header in - $_am_arg | $_am_arg:* ) - break ;; - * ) - _am_stamp_count=`expr $_am_stamp_count + 1` ;; - esac -done -echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count]) - -# Copyright (C) 2001-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_PROG_INSTALL_SH -# ------------------ -# Define $install_sh. -AC_DEFUN([AM_PROG_INSTALL_SH], -[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl -if test x"${install_sh+set}" != xset; then - case $am_aux_dir in - *\ * | *\ *) - install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; - *) - install_sh="\${SHELL} $am_aux_dir/install-sh" - esac -fi -AC_SUBST([install_sh])]) - -# Copyright (C) 2003-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# Check whether the underlying file-system supports filenames -# with a leading dot. For instance MS-DOS doesn't. -AC_DEFUN([AM_SET_LEADING_DOT], -[rm -rf .tst 2>/dev/null -mkdir .tst 2>/dev/null -if test -d .tst; then - am__leading_dot=. -else - am__leading_dot=_ -fi -rmdir .tst 2>/dev/null -AC_SUBST([am__leading_dot])]) - -# Check to see how 'make' treats includes. -*- Autoconf -*- - -# Copyright (C) 2001-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_MAKE_INCLUDE() -# ----------------- -# Check whether make has an 'include' directive that can support all -# the idioms we need for our automatic dependency tracking code. -AC_DEFUN([AM_MAKE_INCLUDE], -[AC_MSG_CHECKING([whether ${MAKE-make} supports the include directive]) -cat > confinc.mk << 'END' -am__doit: - @echo this is the am__doit target >confinc.out -.PHONY: am__doit -END -am__include="#" -am__quote= -# BSD make does it like this. -echo '.include "confinc.mk" # ignored' > confmf.BSD -# Other make implementations (GNU, Solaris 10, AIX) do it like this. -echo 'include confinc.mk # ignored' > confmf.GNU -_am_result=no -for s in GNU BSD; do - AM_RUN_LOG([${MAKE-make} -f confmf.$s && cat confinc.out]) - AS_CASE([$?:`cat confinc.out 2>/dev/null`], - ['0:this is the am__doit target'], - [AS_CASE([$s], - [BSD], [am__include='.include' am__quote='"'], - [am__include='include' am__quote=''])]) - if test "$am__include" != "#"; then - _am_result="yes ($s style)" - break - fi -done -rm -f confinc.* confmf.* -AC_MSG_RESULT([${_am_result}]) -AC_SUBST([am__include])]) -AC_SUBST([am__quote])]) - -# Fake the existence of programs that GNU maintainers use. 
-*- Autoconf -*- - -# Copyright (C) 1997-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_MISSING_PROG(NAME, PROGRAM) -# ------------------------------ -AC_DEFUN([AM_MISSING_PROG], -[AC_REQUIRE([AM_MISSING_HAS_RUN]) -$1=${$1-"${am_missing_run}$2"} -AC_SUBST($1)]) - -# AM_MISSING_HAS_RUN -# ------------------ -# Define MISSING if not defined so far and test if it is modern enough. -# If it is, set am_missing_run to use it, otherwise, to nothing. -AC_DEFUN([AM_MISSING_HAS_RUN], -[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl -AC_REQUIRE_AUX_FILE([missing])dnl -if test x"${MISSING+set}" != xset; then - MISSING="\${SHELL} '$am_aux_dir/missing'" -fi -# Use eval to expand $SHELL -if eval "$MISSING --is-lightweight"; then - am_missing_run="$MISSING " -else - am_missing_run= - AC_MSG_WARN(['missing' script is too old or missing]) -fi -]) - -# Helper functions for option handling. -*- Autoconf -*- - -# Copyright (C) 2001-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# _AM_MANGLE_OPTION(NAME) -# ----------------------- -AC_DEFUN([_AM_MANGLE_OPTION], -[[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])]) - -# _AM_SET_OPTION(NAME) -# -------------------- -# Set option NAME. Presently that only means defining a flag for this option. -AC_DEFUN([_AM_SET_OPTION], -[m4_define(_AM_MANGLE_OPTION([$1]), [1])]) - -# _AM_SET_OPTIONS(OPTIONS) -# ------------------------ -# OPTIONS is a space-separated list of Automake options. -AC_DEFUN([_AM_SET_OPTIONS], -[m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])]) - -# _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET]) -# ------------------------------------------- -# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. -AC_DEFUN([_AM_IF_OPTION], -[m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) - -# Copyright (C) 1999-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# _AM_PROG_CC_C_O -# --------------- -# Like AC_PROG_CC_C_O, but changed for automake. We rewrite AC_PROG_CC -# to automatically call this. -AC_DEFUN([_AM_PROG_CC_C_O], -[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl -AC_REQUIRE_AUX_FILE([compile])dnl -AC_LANG_PUSH([C])dnl -AC_CACHE_CHECK( - [whether $CC understands -c and -o together], - [am_cv_prog_cc_c_o], - [AC_LANG_CONFTEST([AC_LANG_PROGRAM([])]) - # Make sure it works both with $CC and with simple cc. - # Following AC_PROG_CC_C_O, we do the test twice because some - # compilers refuse to overwrite an existing .o file with -o, - # though they will create one. - am_cv_prog_cc_c_o=yes - for am_i in 1 2; do - if AM_RUN_LOG([$CC -c conftest.$ac_ext -o conftest2.$ac_objext]) \ - && test -f conftest2.$ac_objext; then - : OK - else - am_cv_prog_cc_c_o=no - break - fi - done - rm -f core conftest* - unset am_i]) -if test "$am_cv_prog_cc_c_o" != yes; then - # Losing compiler, so override with the script. - # FIXME: It is wrong to rewrite CC. - # But if we don't then we get into trouble of one sort or another. 
- # A longer-term fix would be to have automake use am__CC in this case, - # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" - CC="$am_aux_dir/compile $CC" -fi -AC_LANG_POP([C])]) - -# For backward compatibility. -AC_DEFUN_ONCE([AM_PROG_CC_C_O], [AC_REQUIRE([AC_PROG_CC])]) - -# Copyright (C) 2001-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_RUN_LOG(COMMAND) -# ------------------- -# Run COMMAND, save the exit status in ac_status, and log it. -# (This has been adapted from Autoconf's _AC_RUN_LOG macro.) -AC_DEFUN([AM_RUN_LOG], -[{ echo "$as_me:$LINENO: $1" >&AS_MESSAGE_LOG_FD - ($1) >&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD - (exit $ac_status); }]) - -# Check to make sure that the build environment is sane. -*- Autoconf -*- - -# Copyright (C) 1996-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_SANITY_CHECK -# --------------- -AC_DEFUN([AM_SANITY_CHECK], -[AC_MSG_CHECKING([whether build environment is sane]) -# Reject unsafe characters in $srcdir or the absolute working directory -# name. Accept space and tab only in the latter. -am_lf=' -' -case `pwd` in - *[[\\\"\#\$\&\'\`$am_lf]]*) - AC_MSG_ERROR([unsafe absolute working directory name]);; -esac -case $srcdir in - *[[\\\"\#\$\&\'\`$am_lf\ \ ]]*) - AC_MSG_ERROR([unsafe srcdir value: '$srcdir']);; -esac - -# Do 'set' in a subshell so we don't clobber the current shell's -# arguments. Must try -L first in case configure is actually a -# symlink; some systems play weird games with the mod time of symlinks -# (eg FreeBSD returns the mod time of the symlink's containing -# directory). -if ( - am_has_slept=no - for am_try in 1 2; do - echo "timestamp, slept: $am_has_slept" > conftest.file - set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` - if test "$[*]" = "X"; then - # -L didn't work. - set X `ls -t "$srcdir/configure" conftest.file` - fi - if test "$[*]" != "X $srcdir/configure conftest.file" \ - && test "$[*]" != "X conftest.file $srcdir/configure"; then - - # If neither matched, then we have a broken ls. This can happen - # if, for instance, CONFIG_SHELL is bash and it inherits a - # broken ls alias from the environment. This has actually - # happened. Such a system could not be considered "sane". - AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken - alias in your environment]) - fi - if test "$[2]" = conftest.file || test $am_try -eq 2; then - break - fi - # Just in case. - sleep 1 - am_has_slept=yes - done - test "$[2]" = conftest.file - ) -then - # Ok. - : -else - AC_MSG_ERROR([newly created file is older than distributed files! -Check your system clock]) -fi -AC_MSG_RESULT([yes]) -# If we didn't sleep, we still need to ensure time stamps of config.status and -# generated files are strictly newer. -am_sleep_pid= -if grep 'slept: no' conftest.file >/dev/null 2>&1; then - ( sleep 1 ) & - am_sleep_pid=$! -fi -AC_CONFIG_COMMANDS_PRE( - [AC_MSG_CHECKING([that generated files are newer than configure]) - if test -n "$am_sleep_pid"; then - # Hide warnings about reused PIDs. 
- wait $am_sleep_pid 2>/dev/null - fi - AC_MSG_RESULT([done])]) -rm -f conftest.file -]) - -# Copyright (C) 2009-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_SILENT_RULES([DEFAULT]) -# -------------------------- -# Enable less verbose build rules; with the default set to DEFAULT -# ("yes" being less verbose, "no" or empty being verbose). -AC_DEFUN([AM_SILENT_RULES], -[AC_ARG_ENABLE([silent-rules], [dnl -AS_HELP_STRING( - [--enable-silent-rules], - [less verbose build output (undo: "make V=1")]) -AS_HELP_STRING( - [--disable-silent-rules], - [verbose build output (undo: "make V=0")])dnl -]) -case $enable_silent_rules in @%:@ ((( - yes) AM_DEFAULT_VERBOSITY=0;; - no) AM_DEFAULT_VERBOSITY=1;; - *) AM_DEFAULT_VERBOSITY=m4_if([$1], [yes], [0], [1]);; -esac -dnl -dnl A few 'make' implementations (e.g., NonStop OS and NextStep) -dnl do not support nested variable expansions. -dnl See automake bug#9928 and bug#10237. -am_make=${MAKE-make} -AC_CACHE_CHECK([whether $am_make supports nested variables], - [am_cv_make_support_nested_variables], - [if AS_ECHO([['TRUE=$(BAR$(V)) -BAR0=false -BAR1=true -V=1 -am__doit: - @$(TRUE) -.PHONY: am__doit']]) | $am_make -f - >/dev/null 2>&1; then - am_cv_make_support_nested_variables=yes -else - am_cv_make_support_nested_variables=no -fi]) -if test $am_cv_make_support_nested_variables = yes; then - dnl Using '$V' instead of '$(V)' breaks IRIX make. - AM_V='$(V)' - AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' -else - AM_V=$AM_DEFAULT_VERBOSITY - AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY -fi -AC_SUBST([AM_V])dnl -AM_SUBST_NOTMAKE([AM_V])dnl -AC_SUBST([AM_DEFAULT_V])dnl -AM_SUBST_NOTMAKE([AM_DEFAULT_V])dnl -AC_SUBST([AM_DEFAULT_VERBOSITY])dnl -AM_BACKSLASH='\' -AC_SUBST([AM_BACKSLASH])dnl -_AM_SUBST_NOTMAKE([AM_BACKSLASH])dnl -]) - -# Copyright (C) 2001-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_PROG_INSTALL_STRIP -# --------------------- -# One issue with vendor 'install' (even GNU) is that you can't -# specify the program used to strip binaries. This is especially -# annoying in cross-compiling environments, where the build's strip -# is unlikely to handle the host's binaries. -# Fortunately install-sh will honor a STRIPPROG variable, so we -# always use install-sh in "make install-strip", and initialize -# STRIPPROG with the value of the STRIP variable (set by the user). -AC_DEFUN([AM_PROG_INSTALL_STRIP], -[AC_REQUIRE([AM_PROG_INSTALL_SH])dnl -# Installed binaries are usually stripped using 'strip' when the user -# run "make install-strip". However 'strip' might not be the right -# tool to use in cross-compilation environments, therefore Automake -# will honor the 'STRIP' environment variable to overrule this program. -dnl Don't test for $cross_compiling = yes, because it might be 'maybe'. -if test "$cross_compiling" != no; then - AC_CHECK_TOOL([STRIP], [strip], :) -fi -INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" -AC_SUBST([INSTALL_STRIP_PROGRAM])]) - -# Copyright (C) 2006-2020 Free Software Foundation, Inc. 
-# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# _AM_SUBST_NOTMAKE(VARIABLE) -# --------------------------- -# Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in. -# This macro is traced by Automake. -AC_DEFUN([_AM_SUBST_NOTMAKE]) - -# AM_SUBST_NOTMAKE(VARIABLE) -# -------------------------- -# Public sister of _AM_SUBST_NOTMAKE. -AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)]) - -# Check how to create a tarball. -*- Autoconf -*- - -# Copyright (C) 2004-2020 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# _AM_PROG_TAR(FORMAT) -# -------------------- -# Check how to create a tarball in format FORMAT. -# FORMAT should be one of 'v7', 'ustar', or 'pax'. -# -# Substitute a variable $(am__tar) that is a command -# writing to stdout a FORMAT-tarball containing the directory -# $tardir. -# tardir=directory && $(am__tar) > result.tar -# -# Substitute a variable $(am__untar) that extract such -# a tarball read from stdin. -# $(am__untar) < result.tar -# -AC_DEFUN([_AM_PROG_TAR], -[# Always define AMTAR for backward compatibility. Yes, it's still used -# in the wild :-( We should find a proper way to deprecate it ... -AC_SUBST([AMTAR], ['$${TAR-tar}']) - -# We'll loop over all known methods to create a tar archive until one works. -_am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none' - -m4_if([$1], [v7], - [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'], - - [m4_case([$1], - [ustar], - [# The POSIX 1988 'ustar' format is defined with fixed-size fields. - # There is notably a 21 bits limit for the UID and the GID. In fact, - # the 'pax' utility can hang on bigger UID/GID (see automake bug#8343 - # and bug#13588). - am_max_uid=2097151 # 2^21 - 1 - am_max_gid=$am_max_uid - # The $UID and $GID variables are not portable, so we need to resort - # to the POSIX-mandated id(1) utility. Errors in the 'id' calls - # below are definitely unexpected, so allow the users to see them - # (that is, avoid stderr redirection). - am_uid=`id -u || echo unknown` - am_gid=`id -g || echo unknown` - AC_MSG_CHECKING([whether UID '$am_uid' is supported by ustar format]) - if test $am_uid -le $am_max_uid; then - AC_MSG_RESULT([yes]) - else - AC_MSG_RESULT([no]) - _am_tools=none - fi - AC_MSG_CHECKING([whether GID '$am_gid' is supported by ustar format]) - if test $am_gid -le $am_max_gid; then - AC_MSG_RESULT([yes]) - else - AC_MSG_RESULT([no]) - _am_tools=none - fi], - - [pax], - [], - - [m4_fatal([Unknown tar format])]) - - AC_MSG_CHECKING([how to create a $1 tar archive]) - - # Go ahead even if we have the value already cached. We do so because we - # need to set the values for the 'am__tar' and 'am__untar' variables. - _am_tools=${am_cv_prog_tar_$1-$_am_tools} - - for _am_tool in $_am_tools; do - case $_am_tool in - gnutar) - for _am_tar in tar gnutar gtar; do - AM_RUN_LOG([$_am_tar --version]) && break - done - am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' - am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' - am__untar="$_am_tar -xf -" - ;; - plaintar) - # Must skip GNU tar: if it does not support --format= it doesn't create - # ustar tarball either. 
- (tar --version) >/dev/null 2>&1 && continue - am__tar='tar chf - "$$tardir"' - am__tar_='tar chf - "$tardir"' - am__untar='tar xf -' - ;; - pax) - am__tar='pax -L -x $1 -w "$$tardir"' - am__tar_='pax -L -x $1 -w "$tardir"' - am__untar='pax -r' - ;; - cpio) - am__tar='find "$$tardir" -print | cpio -o -H $1 -L' - am__tar_='find "$tardir" -print | cpio -o -H $1 -L' - am__untar='cpio -i -H $1 -d' - ;; - none) - am__tar=false - am__tar_=false - am__untar=false - ;; - esac - - # If the value was cached, stop now. We just wanted to have am__tar - # and am__untar set. - test -n "${am_cv_prog_tar_$1}" && break - - # tar/untar a dummy directory, and stop if the command works. - rm -rf conftest.dir - mkdir conftest.dir - echo GrepMe > conftest.dir/file - AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar]) - rm -rf conftest.dir - if test -s conftest.tar; then - AM_RUN_LOG([$am__untar /dev/null 2>&1 && break - fi - done - rm -rf conftest.dir - - AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool]) - AC_MSG_RESULT([$am_cv_prog_tar_$1])]) - -AC_SUBST([am__tar]) -AC_SUBST([am__untar]) -]) # _AM_PROG_TAR - diff --git a/c/consumer-verification/compile b/c/consumer-verification/compile deleted file mode 100755 index a85b723c7..000000000 --- a/c/consumer-verification/compile +++ /dev/null @@ -1,347 +0,0 @@ -#! /bin/sh -# Wrapper for compilers which do not understand '-c -o'. - -scriptversion=2012-10-14.11; # UTC - -# Copyright (C) 1999-2014 Free Software Foundation, Inc. -# Written by Tom Tromey . -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2, or (at your option) -# any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. - -# This file is maintained in Automake, please report -# bugs to or send patches to -# . - -nl=' -' - -# We need space, tab and new line, in precisely that order. Quoting is -# there to prevent tools from complaining about whitespace usage. -IFS=" "" $nl" - -file_conv= - -# func_file_conv build_file lazy -# Convert a $build file to $host form and store it in $file -# Currently only supports Windows hosts. If the determined conversion -# type is listed in (the comma separated) LAZY, no conversion will -# take place. 
-func_file_conv () -{ - file=$1 - case $file in - / | /[!/]*) # absolute file, and not a UNC file - if test -z "$file_conv"; then - # lazily determine how to convert abs files - case `uname -s` in - MINGW*) - file_conv=mingw - ;; - CYGWIN*) - file_conv=cygwin - ;; - *) - file_conv=wine - ;; - esac - fi - case $file_conv/,$2, in - *,$file_conv,*) - ;; - mingw/*) - file=`cmd //C echo "$file " | sed -e 's/"\(.*\) " *$/\1/'` - ;; - cygwin/*) - file=`cygpath -m "$file" || echo "$file"` - ;; - wine/*) - file=`winepath -w "$file" || echo "$file"` - ;; - esac - ;; - esac -} - -# func_cl_dashL linkdir -# Make cl look for libraries in LINKDIR -func_cl_dashL () -{ - func_file_conv "$1" - if test -z "$lib_path"; then - lib_path=$file - else - lib_path="$lib_path;$file" - fi - linker_opts="$linker_opts -LIBPATH:$file" -} - -# func_cl_dashl library -# Do a library search-path lookup for cl -func_cl_dashl () -{ - lib=$1 - found=no - save_IFS=$IFS - IFS=';' - for dir in $lib_path $LIB - do - IFS=$save_IFS - if $shared && test -f "$dir/$lib.dll.lib"; then - found=yes - lib=$dir/$lib.dll.lib - break - fi - if test -f "$dir/$lib.lib"; then - found=yes - lib=$dir/$lib.lib - break - fi - if test -f "$dir/lib$lib.a"; then - found=yes - lib=$dir/lib$lib.a - break - fi - done - IFS=$save_IFS - - if test "$found" != yes; then - lib=$lib.lib - fi -} - -# func_cl_wrapper cl arg... -# Adjust compile command to suit cl -func_cl_wrapper () -{ - # Assume a capable shell - lib_path= - shared=: - linker_opts= - for arg - do - if test -n "$eat"; then - eat= - else - case $1 in - -o) - # configure might choose to run compile as 'compile cc -o foo foo.c'. - eat=1 - case $2 in - *.o | *.[oO][bB][jJ]) - func_file_conv "$2" - set x "$@" -Fo"$file" - shift - ;; - *) - func_file_conv "$2" - set x "$@" -Fe"$file" - shift - ;; - esac - ;; - -I) - eat=1 - func_file_conv "$2" mingw - set x "$@" -I"$file" - shift - ;; - -I*) - func_file_conv "${1#-I}" mingw - set x "$@" -I"$file" - shift - ;; - -l) - eat=1 - func_cl_dashl "$2" - set x "$@" "$lib" - shift - ;; - -l*) - func_cl_dashl "${1#-l}" - set x "$@" "$lib" - shift - ;; - -L) - eat=1 - func_cl_dashL "$2" - ;; - -L*) - func_cl_dashL "${1#-L}" - ;; - -static) - shared=false - ;; - -Wl,*) - arg=${1#-Wl,} - save_ifs="$IFS"; IFS=',' - for flag in $arg; do - IFS="$save_ifs" - linker_opts="$linker_opts $flag" - done - IFS="$save_ifs" - ;; - -Xlinker) - eat=1 - linker_opts="$linker_opts $2" - ;; - -*) - set x "$@" "$1" - shift - ;; - *.cc | *.CC | *.cxx | *.CXX | *.[cC]++) - func_file_conv "$1" - set x "$@" -Tp"$file" - shift - ;; - *.c | *.cpp | *.CPP | *.lib | *.LIB | *.Lib | *.OBJ | *.obj | *.[oO]) - func_file_conv "$1" mingw - set x "$@" "$file" - shift - ;; - *) - set x "$@" "$1" - shift - ;; - esac - fi - shift - done - if test -n "$linker_opts"; then - linker_opts="-link$linker_opts" - fi - exec "$@" $linker_opts - exit 1 -} - -eat= - -case $1 in - '') - echo "$0: No command. Try '$0 --help' for more information." 1>&2 - exit 1; - ;; - -h | --h*) - cat <<\EOF -Usage: compile [--help] [--version] PROGRAM [ARGS] - -Wrapper for compilers which do not understand '-c -o'. -Remove '-o dest.o' from ARGS, run PROGRAM with the remaining -arguments, and rename the output as expected. - -If you are trying to build a whole package this is not the -right script to run: please start by reading the file 'INSTALL'. - -Report bugs to . -EOF - exit $? - ;; - -v | --v*) - echo "compile $scriptversion" - exit $? - ;; - cl | *[/\\]cl | cl.exe | *[/\\]cl.exe ) - func_cl_wrapper "$@" # Doesn't return... 
- ;; -esac - -ofile= -cfile= - -for arg -do - if test -n "$eat"; then - eat= - else - case $1 in - -o) - # configure might choose to run compile as 'compile cc -o foo foo.c'. - # So we strip '-o arg' only if arg is an object. - eat=1 - case $2 in - *.o | *.obj) - ofile=$2 - ;; - *) - set x "$@" -o "$2" - shift - ;; - esac - ;; - *.c) - cfile=$1 - set x "$@" "$1" - shift - ;; - *) - set x "$@" "$1" - shift - ;; - esac - fi - shift -done - -if test -z "$ofile" || test -z "$cfile"; then - # If no '-o' option was seen then we might have been invoked from a - # pattern rule where we don't need one. That is ok -- this is a - # normal compilation that the losing compiler can handle. If no - # '.c' file was seen then we are probably linking. That is also - # ok. - exec "$@" -fi - -# Name of file we expect compiler to create. -cofile=`echo "$cfile" | sed 's|^.*[\\/]||; s|^[a-zA-Z]:||; s/\.c$/.o/'` - -# Create the lock directory. -# Note: use '[/\\:.-]' here to ensure that we don't use the same name -# that we are using for the .o file. Also, base the name on the expected -# object file name, since that is what matters with a parallel build. -lockdir=`echo "$cofile" | sed -e 's|[/\\:.-]|_|g'`.d -while true; do - if mkdir "$lockdir" >/dev/null 2>&1; then - break - fi - sleep 1 -done -# FIXME: race condition here if user kills between mkdir and trap. -trap "rmdir '$lockdir'; exit 1" 1 2 15 - -# Run the compile. -"$@" -ret=$? - -if test -f "$cofile"; then - test "$cofile" = "$ofile" || mv "$cofile" "$ofile" -elif test -f "${cofile}bj"; then - test "${cofile}bj" = "$ofile" || mv "${cofile}bj" "$ofile" -fi - -rmdir "$lockdir" -exit $ret - -# Local Variables: -# mode: shell-script -# sh-indentation: 2 -# eval: (add-hook 'write-file-hooks 'time-stamp) -# time-stamp-start: "scriptversion=" -# time-stamp-format: "%:y-%02m-%02d.%02H" -# time-stamp-time-zone: "UTC" -# time-stamp-end: "; # UTC" -# End: diff --git a/c/consumer-verification/config.h b/c/consumer-verification/config.h deleted file mode 100644 index 16ebc4199..000000000 --- a/c/consumer-verification/config.h +++ /dev/null @@ -1,95 +0,0 @@ -/* config.h. Generated from config.h.in by configure. */ -/* config.h.in. Generated from configure.ac by autoheader. */ - -/* Define to 1 if you have a functional curl library. 
*/ -#define HAVE_LIBCURL 1 - -/* Defined if libcurl supports AsynchDNS */ -/* #undef LIBCURL_FEATURE_ASYNCHDNS */ - -/* Defined if libcurl supports IDN */ -/* #undef LIBCURL_FEATURE_IDN */ - -/* Defined if libcurl supports IPv6 */ -#define LIBCURL_FEATURE_IPV6 1 - -/* Defined if libcurl supports KRB4 */ -/* #undef LIBCURL_FEATURE_KRB4 */ - -/* Defined if libcurl supports libz */ -#define LIBCURL_FEATURE_LIBZ 1 - -/* Defined if libcurl supports NTLM */ -/* #undef LIBCURL_FEATURE_NTLM */ - -/* Defined if libcurl supports SSL */ -/* #undef LIBCURL_FEATURE_SSL */ - -/* Defined if libcurl supports SSPI */ -/* #undef LIBCURL_FEATURE_SSPI */ - -/* Defined if libcurl supports DICT */ -#define LIBCURL_PROTOCOL_DICT 1 - -/* Defined if libcurl supports FILE */ -#define LIBCURL_PROTOCOL_FILE 1 - -/* Defined if libcurl supports FTP */ -#define LIBCURL_PROTOCOL_FTP 1 - -/* Defined if libcurl supports FTPS */ -/* #undef LIBCURL_PROTOCOL_FTPS */ - -/* Defined if libcurl supports HTTP */ -#define LIBCURL_PROTOCOL_HTTP 1 - -/* Defined if libcurl supports HTTPS */ -/* #undef LIBCURL_PROTOCOL_HTTPS */ - -/* Defined if libcurl supports IMAP */ -#define LIBCURL_PROTOCOL_IMAP 1 - -/* Defined if libcurl supports LDAP */ -/* #undef LIBCURL_PROTOCOL_LDAP */ - -/* Defined if libcurl supports POP3 */ -#define LIBCURL_PROTOCOL_POP3 1 - -/* Defined if libcurl supports RTSP */ -#define LIBCURL_PROTOCOL_RTSP 1 - -/* Defined if libcurl supports SMTP */ -#define LIBCURL_PROTOCOL_SMTP 1 - -/* Defined if libcurl supports TELNET */ -#define LIBCURL_PROTOCOL_TELNET 1 - -/* Defined if libcurl supports TFTP */ -#define LIBCURL_PROTOCOL_TFTP 1 - -/* Name of package */ -#define PACKAGE "consumer-verification" - -/* Define to the address where bug reports for this package should be sent. */ -#define PACKAGE_BUGREPORT "bug-automake@gnu.org" - -/* Define to the full name of this package. */ -#define PACKAGE_NAME "consumer-verification" - -/* Define to the full name and version of this package. */ -#define PACKAGE_STRING "consumer-verification 0.0.0" - -/* Define to the one symbol short name of this package. */ -#define PACKAGE_TARNAME "consumer-verification" - -/* Define to the home page for this package. */ -#define PACKAGE_URL "" - -/* Define to the version of this package. */ -#define PACKAGE_VERSION "0.0.0" - -/* Version number of package */ -#define VERSION "0.0.0" - -/* Define curl_free() as free() if our version of curl lacks curl_free. */ -/* #undef curl_free */ diff --git a/c/consumer-verification/config.h.in b/c/consumer-verification/config.h.in deleted file mode 100644 index 190f05278..000000000 --- a/c/consumer-verification/config.h.in +++ /dev/null @@ -1,94 +0,0 @@ -/* config.h.in. Generated from configure.ac by autoheader. */ - -/* Define to 1 if you have a functional curl library. 
*/ -#undef HAVE_LIBCURL - -/* Defined if libcurl supports AsynchDNS */ -#undef LIBCURL_FEATURE_ASYNCHDNS - -/* Defined if libcurl supports IDN */ -#undef LIBCURL_FEATURE_IDN - -/* Defined if libcurl supports IPv6 */ -#undef LIBCURL_FEATURE_IPV6 - -/* Defined if libcurl supports KRB4 */ -#undef LIBCURL_FEATURE_KRB4 - -/* Defined if libcurl supports libz */ -#undef LIBCURL_FEATURE_LIBZ - -/* Defined if libcurl supports NTLM */ -#undef LIBCURL_FEATURE_NTLM - -/* Defined if libcurl supports SSL */ -#undef LIBCURL_FEATURE_SSL - -/* Defined if libcurl supports SSPI */ -#undef LIBCURL_FEATURE_SSPI - -/* Defined if libcurl supports DICT */ -#undef LIBCURL_PROTOCOL_DICT - -/* Defined if libcurl supports FILE */ -#undef LIBCURL_PROTOCOL_FILE - -/* Defined if libcurl supports FTP */ -#undef LIBCURL_PROTOCOL_FTP - -/* Defined if libcurl supports FTPS */ -#undef LIBCURL_PROTOCOL_FTPS - -/* Defined if libcurl supports HTTP */ -#undef LIBCURL_PROTOCOL_HTTP - -/* Defined if libcurl supports HTTPS */ -#undef LIBCURL_PROTOCOL_HTTPS - -/* Defined if libcurl supports IMAP */ -#undef LIBCURL_PROTOCOL_IMAP - -/* Defined if libcurl supports LDAP */ -#undef LIBCURL_PROTOCOL_LDAP - -/* Defined if libcurl supports POP3 */ -#undef LIBCURL_PROTOCOL_POP3 - -/* Defined if libcurl supports RTSP */ -#undef LIBCURL_PROTOCOL_RTSP - -/* Defined if libcurl supports SMTP */ -#undef LIBCURL_PROTOCOL_SMTP - -/* Defined if libcurl supports TELNET */ -#undef LIBCURL_PROTOCOL_TELNET - -/* Defined if libcurl supports TFTP */ -#undef LIBCURL_PROTOCOL_TFTP - -/* Name of package */ -#undef PACKAGE - -/* Define to the address where bug reports for this package should be sent. */ -#undef PACKAGE_BUGREPORT - -/* Define to the full name of this package. */ -#undef PACKAGE_NAME - -/* Define to the full name and version of this package. */ -#undef PACKAGE_STRING - -/* Define to the one symbol short name of this package. */ -#undef PACKAGE_TARNAME - -/* Define to the home page for this package. */ -#undef PACKAGE_URL - -/* Define to the version of this package. */ -#undef PACKAGE_VERSION - -/* Version number of package */ -#undef VERSION - -/* Define curl_free() as free() if our version of curl lacks curl_free. */ -#undef curl_free diff --git a/c/consumer-verification/config.status b/c/consumer-verification/config.status deleted file mode 100755 index d8ff32b0d..000000000 --- a/c/consumer-verification/config.status +++ /dev/null @@ -1,1195 +0,0 @@ -#! /bin/bash -# Generated by configure. -# Run this file to recreate the current configuration. -# Compiler output produced by configure, useful for debugging -# configure, is in config.log if it exists. - -debug=false -ac_cs_recheck=false -ac_cs_silent=false - -SHELL=${CONFIG_SHELL-/bin/bash} -export SHELL -## -------------------- ## -## M4sh Initialization. ## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - -as_nl=' -' -export as_nl -# Printing a long string crashes Solaris 7 /usr/bin/printf. 
-as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo -# Prefer a ksh shell builtin over an external printf program on Solaris, -# but without wasting forks for bash or zsh. -if test -z "$BASH_VERSION$ZSH_VERSION" \ - && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='print -r --' - as_echo_n='print -rn --' -elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='printf %s\n' - as_echo_n='printf %s' -else - if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then - as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' - as_echo_n='/usr/ucb/echo -n' - else - as_echo_body='eval expr "X$1" : "X\\(.*\\)"' - as_echo_n_body='eval - arg=$1; - case $arg in #( - *"$as_nl"*) - expr "X$arg" : "X\\(.*\\)$as_nl"; - arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; - esac; - expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" - ' - export as_echo_n_body - as_echo_n='sh -c $as_echo_n_body as_echo' - fi - export as_echo_body - as_echo='sh -c $as_echo_body as_echo' -fi - -# The user is always right. -if test "${PATH_SEPARATOR+set}" != set; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# IFS -# We need space, tab and new line, in precisely that order. Quoting is -# there to prevent editors from complaining about space-tab. -# (If _AS_PATH_WALK were called with IFS unset, it would disable word -# splitting by setting IFS to empty value.) -IFS=" "" $as_nl" - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! -f "$as_myself"; then - $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - -# Unset variables that we do not need and which cause bugs (e.g. in -# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" -# suppresses any "Segmentation fault" message there. '((' could -# trigger a bug in pdksh 5.2.14. -for as_var in BASH_ENV ENV MAIL MAILPATH -do eval test x\${$as_var+set} = xset \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done -PS1='$ ' -PS2='> ' -PS4='+ ' - -# NLS nuisances. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# CDPATH. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. Then exit the -# script with STATUS, using 1 if that was 0. 
-as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - $as_echo "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. -as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. -as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit - -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. -if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. -if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else - as_fn_arith () - { - as_val=`expr "$@" || test $? -eq 1` - } -fi # as_fn_arith - - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X/"$0" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - -# Avoid depending upon Character Ranges. -as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in #((((( --n*) - case `echo 'xy\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. - xy) ECHO_C='\c';; - *) echo `echo ksh88 bug on AIX 6.1` > /dev/null - ECHO_T=' ';; - esac;; -*) - ECHO_N='-n';; -esac - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir 2>/dev/null -fi -if (echo >conf$$.file) 2>/dev/null; then - if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -pR'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! 
-f conf$$.exe || - as_ln_s='cp -pR' - elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln - else - as_ln_s='cp -pR' - fi -else - as_ln_s='cp -pR' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. -as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p -if mkdir -p . 2>/dev/null; then - as_mkdir_p='mkdir -p "$as_dir"' -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - - -# as_fn_executable_p FILE -# ----------------------- -# Test if FILE is an executable regular file. -as_fn_executable_p () -{ - test -f "$1" && test -x "$1" -} # as_fn_executable_p -as_test_x='test -x' -as_executable_p=as_fn_executable_p - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. -as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - -exec 6>&1 -## ----------------------------------- ## -## Main body of $CONFIG_STATUS script. ## -## ----------------------------------- ## -# Save the log message, to keep $0 and so on meaningful, and to -# report actual input values of CONFIG_FILES etc. instead of their -# values after options handling. -ac_log=" -This file was extended by consumer-verification $as_me 0.0.0, which was -generated by GNU Autoconf 2.69. Invocation command line was - - CONFIG_FILES = $CONFIG_FILES - CONFIG_HEADERS = $CONFIG_HEADERS - CONFIG_LINKS = $CONFIG_LINKS - CONFIG_COMMANDS = $CONFIG_COMMANDS - $ $0 $@ - -on `(hostname || uname -n) 2>/dev/null | sed 1q` -" - -# Files that config.status was made for. -config_files=" Makefile src/Makefile" -config_headers=" config.h" -config_commands=" depfiles" - -ac_cs_usage="\ -\`$as_me' instantiates files and other configuration actions -from templates according to the current configuration. Unless the files -and actions are specified as TAGs, all are instantiated by default. - -Usage: $0 [OPTION]... [TAG]... - - -h, --help print this help, then exit - -V, --version print version number and configuration settings, then exit - --config print configuration, then exit - -q, --quiet, --silent - do not print progress messages - -d, --debug don't remove temporary files - --recheck update $as_me by reconfiguring in the same conditions - --file=FILE[:TEMPLATE] - instantiate the configuration file FILE - --header=FILE[:TEMPLATE] - instantiate the configuration header FILE - -Configuration files: -$config_files - -Configuration headers: -$config_headers - -Configuration commands: -$config_commands - -Report bugs to ." 
- -ac_cs_config="" -ac_cs_version="\ -consumer-verification config.status 0.0.0 -configured by ./configure, generated by GNU Autoconf 2.69, - with options \"$ac_cs_config\" - -Copyright (C) 2012 Free Software Foundation, Inc. -This config.status script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it." - -ac_pwd='/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification' -srcdir='.' -INSTALL='/usr/bin/install -c' -MKDIR_P='/usr/bin/mkdir -p' -AWK='gawk' -test -n "$AWK" || AWK=awk -# The default lists apply if the user does not specify any file. -ac_need_defaults=: -while test $# != 0 -do - case $1 in - --*=?*) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` - ac_shift=: - ;; - --*=) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg= - ac_shift=: - ;; - *) - ac_option=$1 - ac_optarg=$2 - ac_shift=shift - ;; - esac - - case $ac_option in - # Handling of the options. - -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) - ac_cs_recheck=: ;; - --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) - $as_echo "$ac_cs_version"; exit ;; - --config | --confi | --conf | --con | --co | --c ) - $as_echo "$ac_cs_config"; exit ;; - --debug | --debu | --deb | --de | --d | -d ) - debug=: ;; - --file | --fil | --fi | --f ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - '') as_fn_error $? "missing file argument" ;; - esac - as_fn_append CONFIG_FILES " '$ac_optarg'" - ac_need_defaults=false;; - --header | --heade | --head | --hea ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - as_fn_append CONFIG_HEADERS " '$ac_optarg'" - ac_need_defaults=false;; - --he | --h) - # Conflict between --help and --header - as_fn_error $? "ambiguous option: \`$1' -Try \`$0 --help' for more information.";; - --help | --hel | -h ) - $as_echo "$ac_cs_usage"; exit ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil | --si | --s) - ac_cs_silent=: ;; - - # This is an error. - -*) as_fn_error $? "unrecognized option: \`$1' -Try \`$0 --help' for more information." ;; - - *) as_fn_append ac_config_targets " $1" - ac_need_defaults=false ;; - - esac - shift -done - -ac_configure_extra_args= - -if $ac_cs_silent; then - exec 6>/dev/null - ac_configure_extra_args="$ac_configure_extra_args --silent" -fi - -if $ac_cs_recheck; then - set X /bin/bash './configure' $ac_configure_extra_args --no-create --no-recursion - shift - $as_echo "running CONFIG_SHELL=/bin/bash $*" >&6 - CONFIG_SHELL='/bin/bash' - export CONFIG_SHELL - exec "$@" -fi - -exec 5>>config.log -{ - echo - sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX -## Running $as_me. ## -_ASBOX - $as_echo "$ac_log" -} >&5 - -# -# INIT-COMMANDS -# -AMDEP_TRUE="" MAKE="make" - - -# Handling of arguments. -for ac_config_target in $ac_config_targets -do - case $ac_config_target in - "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; - "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;; - "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; - "src/Makefile") CONFIG_FILES="$CONFIG_FILES src/Makefile" ;; - - *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; - esac -done - - -# If the user did not use the arguments to specify the items to instantiate, -# then the envvar interface is used. Set only those that are not. 
-# We use the long form for the default assignment because of an extremely -# bizarre bug on SunOS 4.1.3. -if $ac_need_defaults; then - test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files - test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers - test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands -fi - -# Have a temporary directory for convenience. Make it in the build tree -# simply because there is no reason against having it here, and in addition, -# creating and moving files from /tmp can sometimes cause problems. -# Hook for its removal unless debugging. -# Note that there is a small window in which the directory will not be cleaned: -# after its creation but before its name has been assigned to `$tmp'. -$debug || -{ - tmp= ac_tmp= - trap 'exit_status=$? - : "${ac_tmp:=$tmp}" - { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status -' 0 - trap 'as_fn_exit 1' 1 2 13 15 -} -# Create a (secure) tmp directory for tmp files. - -{ - tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && - test -d "$tmp" -} || -{ - tmp=./conf$$-$RANDOM - (umask 077 && mkdir "$tmp") -} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 -ac_tmp=$tmp - -# Set up the scripts for CONFIG_FILES section. -# No need to generate them if there are no CONFIG_FILES. -# This happens for instance with `./config.status config.h'. -if test -n "$CONFIG_FILES"; then - - -ac_cr=`echo X | tr X '\015'` -# On cygwin, bash can eat \r inside `` if the user requested igncr. -# But we know of no other shell where ac_cr would be empty at this -# point, so we can use a bashism as a fallback. -if test "x$ac_cr" = x; then - eval ac_cr=\$\'\\r\' -fi -ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` -if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then - ac_cs_awk_cr='\\r' -else - ac_cs_awk_cr=$ac_cr -fi - -echo 'BEGIN {' >"$ac_tmp/subs1.awk" && -cat >>"$ac_tmp/subs1.awk" <<\_ACAWK && -S["am__EXEEXT_FALSE"]="" -S["am__EXEEXT_TRUE"]="#" -S["LTLIBOBJS"]="" -S["LIBOBJS"]="" -S["LIBCURL"]="-L/usr/local/lib -lcurl" -S["LIBCURL_CPPFLAGS"]="-I/usr/local/include" -S["_libcurl_config"]="" -S["am__fastdepCC_FALSE"]="#" -S["am__fastdepCC_TRUE"]="" -S["CCDEPMODE"]="depmode=gcc3" -S["am__nodep"]="_no" -S["AMDEPBACKSLASH"]="\\" -S["AMDEP_FALSE"]="#" -S["AMDEP_TRUE"]="" -S["am__include"]="include" -S["DEPDIR"]=".deps" -S["OBJEXT"]="o" -S["EXEEXT"]="" -S["ac_ct_CC"]="gcc" -S["CPPFLAGS"]="" -S["LDFLAGS"]="" -S["CFLAGS"]="-g -O2" -S["CC"]="gcc" -S["AM_BACKSLASH"]="\\" -S["AM_DEFAULT_VERBOSITY"]="1" -S["AM_DEFAULT_V"]="$(AM_DEFAULT_VERBOSITY)" -S["AM_V"]="$(V)" -S["am__untar"]="$${TAR-tar} xf -" -S["am__tar"]="$${TAR-tar} chof - \"$$tardir\"" -S["AMTAR"]="$${TAR-tar}" -S["am__leading_dot"]="." 
-S["SET_MAKE"]="" -S["AWK"]="gawk" -S["mkdir_p"]="$(MKDIR_P)" -S["MKDIR_P"]="/usr/bin/mkdir -p" -S["INSTALL_STRIP_PROGRAM"]="$(install_sh) -c -s" -S["STRIP"]="" -S["install_sh"]="${SHELL} /home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/install-sh" -S["MAKEINFO"]="${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' makeinfo" -S["AUTOHEADER"]="${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' autoheader" -S["AUTOMAKE"]="${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' automake-1.16" -S["AUTOCONF"]="${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' autoconf" -S["ACLOCAL"]="${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' aclocal-1.16" -S["VERSION"]="0.0.0" -S["PACKAGE"]="consumer-verification" -S["CYGPATH_W"]="echo" -S["am__isrc"]="" -S["INSTALL_DATA"]="${INSTALL} -m 644" -S["INSTALL_SCRIPT"]="${INSTALL}" -S["INSTALL_PROGRAM"]="${INSTALL}" -S["target_alias"]="" -S["host_alias"]="" -S["build_alias"]="" -S["LIBS"]="" -S["ECHO_T"]="" -S["ECHO_N"]="-n" -S["ECHO_C"]="" -S["DEFS"]="-DHAVE_CONFIG_H" -S["mandir"]="${datarootdir}/man" -S["localedir"]="${datarootdir}/locale" -S["libdir"]="${exec_prefix}/lib" -S["psdir"]="${docdir}" -S["pdfdir"]="${docdir}" -S["dvidir"]="${docdir}" -S["htmldir"]="${docdir}" -S["infodir"]="${datarootdir}/info" -S["docdir"]="${datarootdir}/doc/${PACKAGE_TARNAME}" -S["oldincludedir"]="/usr/include" -S["includedir"]="${prefix}/include" -S["runstatedir"]="${localstatedir}/run" -S["localstatedir"]="${prefix}/var" -S["sharedstatedir"]="${prefix}/com" -S["sysconfdir"]="${prefix}/etc" -S["datadir"]="${datarootdir}" -S["datarootdir"]="${prefix}/share" -S["libexecdir"]="${exec_prefix}/libexec" -S["sbindir"]="${exec_prefix}/sbin" -S["bindir"]="${exec_prefix}/bin" -S["program_transform_name"]="s,x,x," -S["prefix"]="/usr/local" -S["exec_prefix"]="${prefix}" -S["PACKAGE_URL"]="" -S["PACKAGE_BUGREPORT"]="bug-automake@gnu.org" -S["PACKAGE_STRING"]="consumer-verification 0.0.0" -S["PACKAGE_VERSION"]="0.0.0" -S["PACKAGE_TARNAME"]="consumer-verification" -S["PACKAGE_NAME"]="consumer-verification" -S["PATH_SEPARATOR"]=":" -S["SHELL"]="/bin/bash" -S["am__quote"]="" -_ACAWK -cat >>"$ac_tmp/subs1.awk" <<_ACAWK && - for (key in S) S_is_set[key] = 1 - FS = "" - -} -{ - line = $ 0 - nfields = split(line, field, "@") - substed = 0 - len = length(field[1]) - for (i = 2; i < nfields; i++) { - key = field[i] - keylen = length(key) - if (S_is_set[key]) { - value = S[key] - line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) - len += length(value) + length(field[++i]) - substed = 1 - } else - len += 1 + keylen - } - - print line -} - -_ACAWK -if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then - sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" -else - cat -fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ - || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 -fi # test -n "$CONFIG_FILES" - -# Set up the scripts for CONFIG_HEADERS section. -# No need to generate them if there are no CONFIG_HEADERS. -# This happens for instance with `./config.status Makefile'. 
-if test -n "$CONFIG_HEADERS"; then -cat >"$ac_tmp/defines.awk" <<\_ACAWK || -BEGIN { -D["PACKAGE_NAME"]=" \"consumer-verification\"" -D["PACKAGE_TARNAME"]=" \"consumer-verification\"" -D["PACKAGE_VERSION"]=" \"0.0.0\"" -D["PACKAGE_STRING"]=" \"consumer-verification 0.0.0\"" -D["PACKAGE_BUGREPORT"]=" \"bug-automake@gnu.org\"" -D["PACKAGE_URL"]=" \"\"" -D["PACKAGE"]=" \"consumer-verification\"" -D["VERSION"]=" \"0.0.0\"" -D["HAVE_LIBCURL"]=" 1" -D["LIBCURL_FEATURE_IPV6"]=" 1" -D["LIBCURL_FEATURE_LIBZ"]=" 1" -D["LIBCURL_PROTOCOL_DICT"]=" 1" -D["LIBCURL_PROTOCOL_FILE"]=" 1" -D["LIBCURL_PROTOCOL_FTP"]=" 1" -D["LIBCURL_PROTOCOL_GOPHER"]=" 1" -D["LIBCURL_PROTOCOL_HTTP"]=" 1" -D["LIBCURL_PROTOCOL_IMAP"]=" 1" -D["LIBCURL_PROTOCOL_POP3"]=" 1" -D["LIBCURL_PROTOCOL_RTSP"]=" 1" -D["LIBCURL_PROTOCOL_SMTP"]=" 1" -D["LIBCURL_PROTOCOL_TELNET"]=" 1" -D["LIBCURL_PROTOCOL_TFTP"]=" 1" - for (key in D) D_is_set[key] = 1 - FS = "" -} -/^[\t ]*#[\t ]*(define|undef)[\t ]+[_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ][_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789]*([\t (]|$)/ { - line = $ 0 - split(line, arg, " ") - if (arg[1] == "#") { - defundef = arg[2] - mac1 = arg[3] - } else { - defundef = substr(arg[1], 2) - mac1 = arg[2] - } - split(mac1, mac2, "(") #) - macro = mac2[1] - prefix = substr(line, 1, index(line, defundef) - 1) - if (D_is_set[macro]) { - # Preserve the white space surrounding the "#". - print prefix "define", macro P[macro] D[macro] - next - } else { - # Replace #undef with comments. This is necessary, for example, - # in the case of _POSIX_SOURCE, which is predefined and required - # on some systems where configure will not decide to define it. - if (defundef == "undef") { - print "/*", prefix defundef, macro, "*/" - next - } - } -} -{ print } -_ACAWK - as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 -fi # test -n "$CONFIG_HEADERS" - - -eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS" -shift -for ac_tag -do - case $ac_tag in - :[FHLC]) ac_mode=$ac_tag; continue;; - esac - case $ac_mode$ac_tag in - :[FHL]*:*);; - :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;; - :[FH]-) ac_tag=-:-;; - :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; - esac - ac_save_IFS=$IFS - IFS=: - set x $ac_tag - IFS=$ac_save_IFS - shift - ac_file=$1 - shift - - case $ac_mode in - :L) ac_source=$1;; - :[FH]) - ac_file_inputs= - for ac_f - do - case $ac_f in - -) ac_f="$ac_tmp/stdin";; - *) # Look for the file first in the build tree, then in the source tree - # (if the path is not absolute). The absolute path cannot be DOS-style, - # because $ac_f cannot contain `:'. - test -f "$ac_f" || - case $ac_f in - [\\/$]*) false;; - *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; - esac || - as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; - esac - case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac - as_fn_append ac_file_inputs " '$ac_f'" - done - - # Let's still pretend it is `configure' which instantiates (i.e., don't - # use $as_me), people would be surprised to read: - # /* config.h. Generated by config.status. */ - configure_input='Generated from '` - $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' - `' by configure.' - if test x"$ac_file" != x-; then - configure_input="$ac_file. $configure_input" - { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 -$as_echo "$as_me: creating $ac_file" >&6;} - fi - # Neutralize special characters interpreted by sed in replacement strings. 
- case $configure_input in #( - *\&* | *\|* | *\\* ) - ac_sed_conf_input=`$as_echo "$configure_input" | - sed 's/[\\\\&|]/\\\\&/g'`;; #( - *) ac_sed_conf_input=$configure_input;; - esac - - case $ac_tag in - *:-:* | *:-) cat >"$ac_tmp/stdin" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; - esac - ;; - esac - - ac_dir=`$as_dirname -- "$ac_file" || -$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$ac_file" : 'X\(//\)[^/]' \| \ - X"$ac_file" : 'X\(//\)$' \| \ - X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$ac_file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - as_dir="$ac_dir"; as_fn_mkdir_p - ac_builddir=. - -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - - case $ac_mode in - :F) - # - # CONFIG_FILE - # - - case $INSTALL in - [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; - *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; - esac - ac_MKDIR_P=$MKDIR_P - case $MKDIR_P in - [\\/$]* | ?:[\\/]* ) ;; - */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; - esac -# If the template does not know about datarootdir, expand it. -# FIXME: This hack should be removed a few years after 2.60. 
-ac_datarootdir_hack=; ac_datarootdir_seen= -ac_sed_dataroot=' -/datarootdir/ { - p - q -} -/@datadir@/p -/@docdir@/p -/@infodir@/p -/@localedir@/p -/@mandir@/p' -case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in -*datarootdir*) ac_datarootdir_seen=yes;; -*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 -$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} - ac_datarootdir_hack=' - s&@datadir@&${datarootdir}&g - s&@docdir@&${datarootdir}/doc/${PACKAGE_TARNAME}&g - s&@infodir@&${datarootdir}/info&g - s&@localedir@&${datarootdir}/locale&g - s&@mandir@&${datarootdir}/man&g - s&\${datarootdir}&${prefix}/share&g' ;; -esac -ac_sed_extra="/^[ ]*VPATH[ ]*=[ ]*/{ -h -s/// -s/^/:/ -s/[ ]*$/:/ -s/:\$(srcdir):/:/g -s/:\${srcdir}:/:/g -s/:@srcdir@:/:/g -s/^:*// -s/:*$// -x -s/\(=[ ]*\).*/\1/ -G -s/\n// -s/^[^=]*=[ ]*$// -} - -:t -/@[a-zA-Z_][a-zA-Z_0-9]*@/!b -s|@configure_input@|$ac_sed_conf_input|;t t -s&@top_builddir@&$ac_top_builddir_sub&;t t -s&@top_build_prefix@&$ac_top_build_prefix&;t t -s&@srcdir@&$ac_srcdir&;t t -s&@abs_srcdir@&$ac_abs_srcdir&;t t -s&@top_srcdir@&$ac_top_srcdir&;t t -s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t -s&@builddir@&$ac_builddir&;t t -s&@abs_builddir@&$ac_abs_builddir&;t t -s&@abs_top_builddir@&$ac_abs_top_builddir&;t t -s&@INSTALL@&$ac_INSTALL&;t t -s&@MKDIR_P@&$ac_MKDIR_P&;t t -$ac_datarootdir_hack -" -eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ - >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - -test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && - { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && - { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ - "$ac_tmp/out"`; test -z "$ac_out"; } && - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&5 -$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&2;} - - rm -f "$ac_tmp/stdin" - case $ac_file in - -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; - *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; - esac \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - ;; - :H) - # - # CONFIG_HEADER - # - if test x"$ac_file" != x-; then - { - $as_echo "/* $configure_input */" \ - && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" - } >"$ac_tmp/config.h" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then - { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 -$as_echo "$as_me: $ac_file is unchanged" >&6;} - else - rm -f "$ac_file" - mv "$ac_tmp/config.h" "$ac_file" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - fi - else - $as_echo "/* $configure_input */" \ - && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ - || as_fn_error $? "could not create -" "$LINENO" 5 - fi -# Compute "$ac_file"'s index in $config_headers. 
-_am_arg="$ac_file" -_am_stamp_count=1 -for _am_header in $config_headers :; do - case $_am_header in - $_am_arg | $_am_arg:* ) - break ;; - * ) - _am_stamp_count=`expr $_am_stamp_count + 1` ;; - esac -done -echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" || -$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$_am_arg" : 'X\(//\)[^/]' \| \ - X"$_am_arg" : 'X\(//\)$' \| \ - X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$_am_arg" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'`/stamp-h$_am_stamp_count - ;; - - :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5 -$as_echo "$as_me: executing $ac_file commands" >&6;} - ;; - esac - - - case $ac_file$ac_mode in - "depfiles":C) test x"$AMDEP_TRUE" != x"" || { - # Older Autoconf quotes --file arguments for eval, but not when files - # are listed without --file. Let's play safe and only enable the eval - # if we detect the quoting. - # TODO: see whether this extra hack can be removed once we start - # requiring Autoconf 2.70 or later. - case $CONFIG_FILES in #( - *\'*) : - eval set x "$CONFIG_FILES" ;; #( - *) : - set x $CONFIG_FILES ;; #( - *) : - ;; -esac - shift - # Used to flag and report bootstrapping failures. - am_rc=0 - for am_mf - do - # Strip MF so we end up with the name of the file. - am_mf=`$as_echo "$am_mf" | sed -e 's/:.*$//'` - # Check whether this is an Automake generated Makefile which includes - # dependency-tracking related rules and includes. - # Grep'ing the whole file directly is not great: AIX grep has a line - # limit of 2048, but all sed's we know have understand at least 4000. - sed -n 's,^am--depfiles:.*,X,p' "$am_mf" | grep X >/dev/null 2>&1 \ - || continue - am_dirpart=`$as_dirname -- "$am_mf" || -$as_expr X"$am_mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$am_mf" : 'X\(//\)[^/]' \| \ - X"$am_mf" : 'X\(//\)$' \| \ - X"$am_mf" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$am_mf" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - am_filepart=`$as_basename -- "$am_mf" || -$as_expr X/"$am_mf" : '.*/\([^/][^/]*\)/*$' \| \ - X"$am_mf" : 'X\(//\)$' \| \ - X"$am_mf" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X/"$am_mf" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - { echo "$as_me:$LINENO: cd "$am_dirpart" \ - && sed -e '/# am--include-marker/d' "$am_filepart" \ - | $MAKE -f - am--depfiles" >&5 - (cd "$am_dirpart" \ - && sed -e '/# am--include-marker/d' "$am_filepart" \ - | $MAKE -f - am--depfiles) >&5 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } || am_rc=$? - done - if test $am_rc -ne 0; then - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "Something went wrong bootstrapping makefile fragments - for automatic dependency tracking. If GNU make was not used, consider - re-running the configure script with MAKE=\"gmake\" (or whatever is - necessary). You can also try re-running configure with the - '--disable-dependency-tracking' option to at least be able to build - the package (albeit without support for automatic dependency tracking). 
-See \`config.log' for more details" "$LINENO" 5; } - fi - { am_dirpart=; unset am_dirpart;} - { am_filepart=; unset am_filepart;} - { am_mf=; unset am_mf;} - { am_rc=; unset am_rc;} - rm -f conftest-deps.mk -} - ;; - - esac -done # for ac_tag - - -as_fn_exit 0 diff --git a/c/consumer-verification/configure b/c/consumer-verification/configure deleted file mode 100755 index 6eef464e4..000000000 --- a/c/consumer-verification/configure +++ /dev/null @@ -1,5469 +0,0 @@ -#! /bin/sh -# Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.69 for consumer-verification 0.0.0. -# -# Report bugs to . -# -# -# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. -# -# -# This configure script is free software; the Free Software Foundation -# gives unlimited permission to copy, distribute and modify it. -## -------------------- ## -## M4sh Initialization. ## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - -as_nl=' -' -export as_nl -# Printing a long string crashes Solaris 7 /usr/bin/printf. -as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo -# Prefer a ksh shell builtin over an external printf program on Solaris, -# but without wasting forks for bash or zsh. -if test -z "$BASH_VERSION$ZSH_VERSION" \ - && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='print -r --' - as_echo_n='print -rn --' -elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='printf %s\n' - as_echo_n='printf %s' -else - if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then - as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' - as_echo_n='/usr/ucb/echo -n' - else - as_echo_body='eval expr "X$1" : "X\\(.*\\)"' - as_echo_n_body='eval - arg=$1; - case $arg in #( - *"$as_nl"*) - expr "X$arg" : "X\\(.*\\)$as_nl"; - arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; - esac; - expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" - ' - export as_echo_n_body - as_echo_n='sh -c $as_echo_n_body as_echo' - fi - export as_echo_body - as_echo='sh -c $as_echo_body as_echo' -fi - -# The user is always right. -if test "${PATH_SEPARATOR+set}" != set; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# IFS -# We need space, tab and new line, in precisely that order. Quoting is -# there to prevent editors from complaining about space-tab. -# (If _AS_PATH_WALK were called with IFS unset, it would disable word -# splitting by setting IFS to empty value.) -IFS=" "" $as_nl" - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! -f "$as_myself"; then - $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - -# Unset variables that we do not need and which cause bugs (e.g. in -# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" -# suppresses any "Segmentation fault" message there. '((' could -# trigger a bug in pdksh 5.2.14. -for as_var in BASH_ENV ENV MAIL MAILPATH -do eval test x\${$as_var+set} = xset \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done -PS1='$ ' -PS2='> ' -PS4='+ ' - -# NLS nuisances. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# CDPATH. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - -# Use a proper internal environment variable to ensure we don't fall - # into an infinite loop, continuously re-executing ourselves. - if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then - _as_can_reexec=no; export _as_can_reexec; - # We cannot yet assume a decent shell, so we have to provide a -# neutralization value for shells without unset; and this also -# works around shells that cannot unset nonexistent variables. -# Preserve -v and -x to the replacement shell. -BASH_ENV=/dev/null -ENV=/dev/null -(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV -case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; -esac -exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} -# Admittedly, this is quite paranoid, since all the known shells bail -# out after a failed `exec'. -$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 -as_fn_exit 255 - fi - # We don't want this to propagate to other subprocesses. - { _as_can_reexec=; unset _as_can_reexec;} -if test "x$CONFIG_SHELL" = x; then - as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which - # is contrary to our usage. Disable this feature. - alias -g '\${1+\"\$@\"}'='\"\$@\"' - setopt NO_GLOB_SUBST -else - case \`(set -o) 2>/dev/null\` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi -" - as_required="as_fn_return () { (exit \$1); } -as_fn_success () { as_fn_return 0; } -as_fn_failure () { as_fn_return 1; } -as_fn_ret_success () { return 0; } -as_fn_ret_failure () { return 1; } - -exitcode=0 -as_fn_success || { exitcode=1; echo as_fn_success failed.; } -as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } -as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } -as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } -if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : - -else - exitcode=1; echo positional parameters were not saved. 
-fi -test x\$exitcode = x0 || exit 1 -test -x / || exit 1" - as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO - as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO - eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && - test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1" - if (eval "$as_required") 2>/dev/null; then : - as_have_required=yes -else - as_have_required=no -fi - if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : - -else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -as_found=false -for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - as_found=: - case $as_dir in #( - /*) - for as_base in sh bash ksh sh5; do - # Try only shells that exist, to save several forks. - as_shell=$as_dir/$as_base - if { test -f "$as_shell" || test -f "$as_shell.exe"; } && - { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : - CONFIG_SHELL=$as_shell as_have_required=yes - if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : - break 2 -fi -fi - done;; - esac - as_found=false -done -$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && - { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : - CONFIG_SHELL=$SHELL as_have_required=yes -fi; } -IFS=$as_save_IFS - - - if test "x$CONFIG_SHELL" != x; then : - export CONFIG_SHELL - # We cannot yet assume a decent shell, so we have to provide a -# neutralization value for shells without unset; and this also -# works around shells that cannot unset nonexistent variables. -# Preserve -v and -x to the replacement shell. -BASH_ENV=/dev/null -ENV=/dev/null -(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV -case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; -esac -exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} -# Admittedly, this is quite paranoid, since all the known shells bail -# out after a failed `exec'. -$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 -exit 255 -fi - - if test x$as_have_required = xno; then : - $as_echo "$0: This script requires a shell more modern than all" - $as_echo "$0: the shells that I found on your system." - if test x${ZSH_VERSION+set} = xset ; then - $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" - $as_echo "$0: be upgraded to zsh 4.3.4 or later." - else - $as_echo "$0: Please tell bug-autoconf@gnu.org and -$0: bug-automake@gnu.org about your system, including any -$0: error possibly output before this message. Then install -$0: a modern shell, or manually run the script under such a -$0: shell if you do have one." - fi - exit 1 -fi -fi -fi -SHELL=${CONFIG_SHELL-/bin/sh} -export SHELL -# Unset more variables known to interfere with behavior of common tools. -CLICOLOR_FORCE= GREP_OPTIONS= -unset CLICOLOR_FORCE GREP_OPTIONS - -## --------------------- ## -## M4sh Shell Functions. ## -## --------------------- ## -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. 
-as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. -as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. -as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p - -# as_fn_executable_p FILE -# ----------------------- -# Test if FILE is an executable regular file. -as_fn_executable_p () -{ - test -f "$1" && test -x "$1" -} # as_fn_executable_p -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. -if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. -if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else - as_fn_arith () - { - as_val=`expr "$@" || test $? -eq 1` - } -fi # as_fn_arith - - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. Then exit the -# script with STATUS, using 1 if that was 0. -as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - $as_echo "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 
2>/dev/null ||
-$as_echo X/"$0" |
- sed '/^.*\/\([^/][^/]*\)\/*$/{
- s//\1/
- q
- }
- /^X\/\(\/\/\)$/{
- s//\1/
- q
- }
- /^X\/\(\/\).*/{
- s//\1/
- q
- }
- s/.*/./; q'`
-
-# Avoid depending upon Character Ranges.
-as_cr_letters='abcdefghijklmnopqrstuvwxyz'
-as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
-as_cr_Letters=$as_cr_letters$as_cr_LETTERS
-as_cr_digits='0123456789'
-as_cr_alnum=$as_cr_Letters$as_cr_digits
-
-
- as_lineno_1=$LINENO as_lineno_1a=$LINENO
- as_lineno_2=$LINENO as_lineno_2a=$LINENO
- eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" &&
- test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || {
- # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-)
- sed -n '
- p
- /[$]LINENO/=
- ' <$as_myself |
- sed '
- s/[$]LINENO.*/&-/
- t lineno
- b
- :lineno
- N
- :loop
- s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/
- t loop
- s/-\n.*//
- ' >$as_me.lineno &&
- chmod +x "$as_me.lineno" ||
- { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; }
-
- # If we had to re-execute with $CONFIG_SHELL, we're ensured to have
- # already done that, so ensure we don't try to do so again and fall
- # in an infinite loop. This has already happened in practice.
- _as_can_reexec=no; export _as_can_reexec
- # Don't try to exec as it changes $[0], causing all sort of problems
- # (the dirname of $[0] is not the place where we might find the
- # original and so on. Autoconf is especially sensitive to this).
- . "./$as_me.lineno"
- # Exit status is that of the last command.
- exit
-}
-
-ECHO_C= ECHO_N= ECHO_T=
-case `echo -n x` in #(((((
--n*)
- case `echo 'xy\c'` in
- *c*) ECHO_T=' ';; # ECHO_T is single tab character.
- xy) ECHO_C='\c';;
- *) echo `echo ksh88 bug on AIX 6.1` > /dev/null
- ECHO_T=' ';;
- esac;;
-*)
- ECHO_N='-n';;
-esac
-
-rm -f conf$$ conf$$.exe conf$$.file
-if test -d conf$$.dir; then
- rm -f conf$$.dir/conf$$.file
-else
- rm -f conf$$.dir
- mkdir conf$$.dir 2>/dev/null
-fi
-if (echo >conf$$.file) 2>/dev/null; then
- if ln -s conf$$.file conf$$ 2>/dev/null; then
- as_ln_s='ln -s'
- # ... but there are two gotchas:
- # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
- # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
- # In both cases, we have to default to `cp -pR'.
- ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
- as_ln_s='cp -pR'
- elif ln conf$$.file conf$$ 2>/dev/null; then
- as_ln_s=ln
- else
- as_ln_s='cp -pR'
- fi
-else
- as_ln_s='cp -pR'
-fi
-rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
-rmdir conf$$.dir 2>/dev/null
-
-if mkdir -p . 2>/dev/null; then
- as_mkdir_p='mkdir -p "$as_dir"'
-else
- test -d ./-p && rmdir ./-p
- as_mkdir_p=false
-fi
-
-as_test_x='test -x'
-as_executable_p=as_fn_executable_p
-
-# Sed expression to map a string onto a valid CPP name.
-as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
-
-# Sed expression to map a string onto a valid variable name.
-as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
-
-
-test -n "$DJDIR" || exec 7<&0 </dev/null
-exec 6>&1
-
-# Name of the host.
-# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status,
-# so uname gets run too.
-ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q`
-
-#
-# Initializations.
-#
-ac_default_prefix=/usr/local
-ac_clean_files=
-ac_config_libobj_dir=.
-LIBOBJS=
-cross_compiling=no
-subdirs=
-MFLAGS=
-MAKEFLAGS=
-
-# Identity of this package. 
-PACKAGE_NAME='consumer-verification' -PACKAGE_TARNAME='consumer-verification' -PACKAGE_VERSION='0.0.0' -PACKAGE_STRING='consumer-verification 0.0.0' -PACKAGE_BUGREPORT='bug-automake@gnu.org' -PACKAGE_URL='' - -ac_subst_vars='am__EXEEXT_FALSE -am__EXEEXT_TRUE -LTLIBOBJS -LIBOBJS -LIBCURL -LIBCURL_CPPFLAGS -_libcurl_config -am__fastdepCC_FALSE -am__fastdepCC_TRUE -CCDEPMODE -am__nodep -AMDEPBACKSLASH -AMDEP_FALSE -AMDEP_TRUE -am__include -DEPDIR -OBJEXT -EXEEXT -ac_ct_CC -CPPFLAGS -LDFLAGS -CFLAGS -CC -AM_BACKSLASH -AM_DEFAULT_VERBOSITY -AM_DEFAULT_V -AM_V -am__untar -am__tar -AMTAR -am__leading_dot -SET_MAKE -AWK -mkdir_p -MKDIR_P -INSTALL_STRIP_PROGRAM -STRIP -install_sh -MAKEINFO -AUTOHEADER -AUTOMAKE -AUTOCONF -ACLOCAL -VERSION -PACKAGE -CYGPATH_W -am__isrc -INSTALL_DATA -INSTALL_SCRIPT -INSTALL_PROGRAM -target_alias -host_alias -build_alias -LIBS -ECHO_T -ECHO_N -ECHO_C -DEFS -mandir -localedir -libdir -psdir -pdfdir -dvidir -htmldir -infodir -docdir -oldincludedir -includedir -runstatedir -localstatedir -sharedstatedir -sysconfdir -datadir -datarootdir -libexecdir -sbindir -bindir -program_transform_name -prefix -exec_prefix -PACKAGE_URL -PACKAGE_BUGREPORT -PACKAGE_STRING -PACKAGE_VERSION -PACKAGE_TARNAME -PACKAGE_NAME -PATH_SEPARATOR -SHELL -am__quote' -ac_subst_files='' -ac_user_opts=' -enable_option_checking -enable_silent_rules -enable_dependency_tracking -with_libcurl -' - ac_precious_vars='build_alias -host_alias -target_alias -CC -CFLAGS -LDFLAGS -LIBS -CPPFLAGS' - - -# Initialize some variables set by options. -ac_init_help= -ac_init_version=false -ac_unrecognized_opts= -ac_unrecognized_sep= -# The variables have the same names as the options, with -# dashes changed to underlines. -cache_file=/dev/null -exec_prefix=NONE -no_create= -no_recursion= -prefix=NONE -program_prefix=NONE -program_suffix=NONE -program_transform_name=s,x,x, -silent= -site= -srcdir= -verbose= -x_includes=NONE -x_libraries=NONE - -# Installation directory options. -# These are left unexpanded so users can "make install exec_prefix=/foo" -# and all the variables that are supposed to be based on exec_prefix -# by default will actually change. -# Use braces instead of parens because sh, perl, etc. also accept them. -# (The list follows the same order as the GNU Coding Standards.) -bindir='${exec_prefix}/bin' -sbindir='${exec_prefix}/sbin' -libexecdir='${exec_prefix}/libexec' -datarootdir='${prefix}/share' -datadir='${datarootdir}' -sysconfdir='${prefix}/etc' -sharedstatedir='${prefix}/com' -localstatedir='${prefix}/var' -runstatedir='${localstatedir}/run' -includedir='${prefix}/include' -oldincludedir='/usr/include' -docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' -infodir='${datarootdir}/info' -htmldir='${docdir}' -dvidir='${docdir}' -pdfdir='${docdir}' -psdir='${docdir}' -libdir='${exec_prefix}/lib' -localedir='${datarootdir}/locale' -mandir='${datarootdir}/man' - -ac_prev= -ac_dashdash= -for ac_option -do - # If the previous option needs an argument, assign it. - if test -n "$ac_prev"; then - eval $ac_prev=\$ac_option - ac_prev= - continue - fi - - case $ac_option in - *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; - *=) ac_optarg= ;; - *) ac_optarg=yes ;; - esac - - # Accept the important Cygnus configure options, so we can diagnose typos. 
- - case $ac_dashdash$ac_option in - --) - ac_dashdash=yes ;; - - -bindir | --bindir | --bindi | --bind | --bin | --bi) - ac_prev=bindir ;; - -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) - bindir=$ac_optarg ;; - - -build | --build | --buil | --bui | --bu) - ac_prev=build_alias ;; - -build=* | --build=* | --buil=* | --bui=* | --bu=*) - build_alias=$ac_optarg ;; - - -cache-file | --cache-file | --cache-fil | --cache-fi \ - | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) - ac_prev=cache_file ;; - -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ - | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) - cache_file=$ac_optarg ;; - - --config-cache | -C) - cache_file=config.cache ;; - - -datadir | --datadir | --datadi | --datad) - ac_prev=datadir ;; - -datadir=* | --datadir=* | --datadi=* | --datad=*) - datadir=$ac_optarg ;; - - -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ - | --dataroo | --dataro | --datar) - ac_prev=datarootdir ;; - -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ - | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) - datarootdir=$ac_optarg ;; - - -disable-* | --disable-*) - ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid feature name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"enable_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval enable_$ac_useropt=no ;; - - -docdir | --docdir | --docdi | --doc | --do) - ac_prev=docdir ;; - -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) - docdir=$ac_optarg ;; - - -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) - ac_prev=dvidir ;; - -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) - dvidir=$ac_optarg ;; - - -enable-* | --enable-*) - ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid feature name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"enable_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval enable_$ac_useropt=\$ac_optarg ;; - - -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ - | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ - | --exec | --exe | --ex) - ac_prev=exec_prefix ;; - -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ - | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ - | --exec=* | --exe=* | --ex=*) - exec_prefix=$ac_optarg ;; - - -gas | --gas | --ga | --g) - # Obsolete; use --with-gas. 
- with_gas=yes ;; - - -help | --help | --hel | --he | -h) - ac_init_help=long ;; - -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) - ac_init_help=recursive ;; - -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) - ac_init_help=short ;; - - -host | --host | --hos | --ho) - ac_prev=host_alias ;; - -host=* | --host=* | --hos=* | --ho=*) - host_alias=$ac_optarg ;; - - -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) - ac_prev=htmldir ;; - -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ - | --ht=*) - htmldir=$ac_optarg ;; - - -includedir | --includedir | --includedi | --included | --include \ - | --includ | --inclu | --incl | --inc) - ac_prev=includedir ;; - -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ - | --includ=* | --inclu=* | --incl=* | --inc=*) - includedir=$ac_optarg ;; - - -infodir | --infodir | --infodi | --infod | --info | --inf) - ac_prev=infodir ;; - -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) - infodir=$ac_optarg ;; - - -libdir | --libdir | --libdi | --libd) - ac_prev=libdir ;; - -libdir=* | --libdir=* | --libdi=* | --libd=*) - libdir=$ac_optarg ;; - - -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ - | --libexe | --libex | --libe) - ac_prev=libexecdir ;; - -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ - | --libexe=* | --libex=* | --libe=*) - libexecdir=$ac_optarg ;; - - -localedir | --localedir | --localedi | --localed | --locale) - ac_prev=localedir ;; - -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) - localedir=$ac_optarg ;; - - -localstatedir | --localstatedir | --localstatedi | --localstated \ - | --localstate | --localstat | --localsta | --localst | --locals) - ac_prev=localstatedir ;; - -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ - | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) - localstatedir=$ac_optarg ;; - - -mandir | --mandir | --mandi | --mand | --man | --ma | --m) - ac_prev=mandir ;; - -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) - mandir=$ac_optarg ;; - - -nfp | --nfp | --nf) - # Obsolete; use --without-fp. 
- with_fp=no ;; - - -no-create | --no-create | --no-creat | --no-crea | --no-cre \ - | --no-cr | --no-c | -n) - no_create=yes ;; - - -no-recursion | --no-recursion | --no-recursio | --no-recursi \ - | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) - no_recursion=yes ;; - - -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ - | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ - | --oldin | --oldi | --old | --ol | --o) - ac_prev=oldincludedir ;; - -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ - | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ - | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) - oldincludedir=$ac_optarg ;; - - -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) - ac_prev=prefix ;; - -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) - prefix=$ac_optarg ;; - - -program-prefix | --program-prefix | --program-prefi | --program-pref \ - | --program-pre | --program-pr | --program-p) - ac_prev=program_prefix ;; - -program-prefix=* | --program-prefix=* | --program-prefi=* \ - | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) - program_prefix=$ac_optarg ;; - - -program-suffix | --program-suffix | --program-suffi | --program-suff \ - | --program-suf | --program-su | --program-s) - ac_prev=program_suffix ;; - -program-suffix=* | --program-suffix=* | --program-suffi=* \ - | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) - program_suffix=$ac_optarg ;; - - -program-transform-name | --program-transform-name \ - | --program-transform-nam | --program-transform-na \ - | --program-transform-n | --program-transform- \ - | --program-transform | --program-transfor \ - | --program-transfo | --program-transf \ - | --program-trans | --program-tran \ - | --progr-tra | --program-tr | --program-t) - ac_prev=program_transform_name ;; - -program-transform-name=* | --program-transform-name=* \ - | --program-transform-nam=* | --program-transform-na=* \ - | --program-transform-n=* | --program-transform-=* \ - | --program-transform=* | --program-transfor=* \ - | --program-transfo=* | --program-transf=* \ - | --program-trans=* | --program-tran=* \ - | --progr-tra=* | --program-tr=* | --program-t=*) - program_transform_name=$ac_optarg ;; - - -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) - ac_prev=pdfdir ;; - -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) - pdfdir=$ac_optarg ;; - - -psdir | --psdir | --psdi | --psd | --ps) - ac_prev=psdir ;; - -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) - psdir=$ac_optarg ;; - - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - silent=yes ;; - - -runstatedir | --runstatedir | --runstatedi | --runstated \ - | --runstate | --runstat | --runsta | --runst | --runs \ - | --run | --ru | --r) - ac_prev=runstatedir ;; - -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ - | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ - | --run=* | --ru=* | --r=*) - runstatedir=$ac_optarg ;; - - -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) - ac_prev=sbindir ;; - -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ - | --sbi=* | --sb=*) - sbindir=$ac_optarg ;; - - -sharedstatedir | --sharedstatedir | --sharedstatedi \ - | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ - | --sharedst | --shareds | --shared | --share | --shar \ - | --sha | --sh) - 
ac_prev=sharedstatedir ;; - -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ - | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ - | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ - | --sha=* | --sh=*) - sharedstatedir=$ac_optarg ;; - - -site | --site | --sit) - ac_prev=site ;; - -site=* | --site=* | --sit=*) - site=$ac_optarg ;; - - -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) - ac_prev=srcdir ;; - -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) - srcdir=$ac_optarg ;; - - -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ - | --syscon | --sysco | --sysc | --sys | --sy) - ac_prev=sysconfdir ;; - -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ - | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) - sysconfdir=$ac_optarg ;; - - -target | --target | --targe | --targ | --tar | --ta | --t) - ac_prev=target_alias ;; - -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) - target_alias=$ac_optarg ;; - - -v | -verbose | --verbose | --verbos | --verbo | --verb) - verbose=yes ;; - - -version | --version | --versio | --versi | --vers | -V) - ac_init_version=: ;; - - -with-* | --with-*) - ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid package name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"with_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval with_$ac_useropt=\$ac_optarg ;; - - -without-* | --without-*) - ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid package name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"with_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval with_$ac_useropt=no ;; - - --x) - # Obsolete; use --with-x. - with_x=yes ;; - - -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ - | --x-incl | --x-inc | --x-in | --x-i) - ac_prev=x_includes ;; - -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ - | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) - x_includes=$ac_optarg ;; - - -x-libraries | --x-libraries | --x-librarie | --x-librari \ - | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) - ac_prev=x_libraries ;; - -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ - | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) - x_libraries=$ac_optarg ;; - - -*) as_fn_error $? "unrecognized option: \`$ac_option' -Try \`$0 --help' for more information" - ;; - - *=*) - ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` - # Reject names that are not valid shell variable names. - case $ac_envvar in #( - '' | [0-9]* | *[!_$as_cr_alnum]* ) - as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; - esac - eval $ac_envvar=\$ac_optarg - export $ac_envvar ;; - - *) - # FIXME: should be removed in autoconf 3.0. 
- $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 - expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && - $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 - : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" - ;; - - esac -done - -if test -n "$ac_prev"; then - ac_option=--`echo $ac_prev | sed 's/_/-/g'` - as_fn_error $? "missing argument to $ac_option" -fi - -if test -n "$ac_unrecognized_opts"; then - case $enable_option_checking in - no) ;; - fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; - *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; - esac -fi - -# Check all directory arguments for consistency. -for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ - datadir sysconfdir sharedstatedir localstatedir includedir \ - oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ - libdir localedir mandir runstatedir -do - eval ac_val=\$$ac_var - # Remove trailing slashes. - case $ac_val in - */ ) - ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` - eval $ac_var=\$ac_val;; - esac - # Be sure to have absolute directory names. - case $ac_val in - [\\/$]* | ?:[\\/]* ) continue;; - NONE | '' ) case $ac_var in *prefix ) continue;; esac;; - esac - as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" -done - -# There might be people who depend on the old broken behavior: `$host' -# used to hold the argument of --host etc. -# FIXME: To remove some day. -build=$build_alias -host=$host_alias -target=$target_alias - -# FIXME: To remove some day. -if test "x$host_alias" != x; then - if test "x$build_alias" = x; then - cross_compiling=maybe - elif test "x$build_alias" != "x$host_alias"; then - cross_compiling=yes - fi -fi - -ac_tool_prefix= -test -n "$host_alias" && ac_tool_prefix=$host_alias- - -test "$silent" = yes && exec 6>/dev/null - - -ac_pwd=`pwd` && test -n "$ac_pwd" && -ac_ls_di=`ls -di .` && -ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || - as_fn_error $? "working directory cannot be determined" -test "X$ac_ls_di" = "X$ac_pwd_ls_di" || - as_fn_error $? "pwd does not report name of working directory" - - -# Find the source files, if location was not specified. -if test -z "$srcdir"; then - ac_srcdir_defaulted=yes - # Try the directory containing this script, then the parent directory. - ac_confdir=`$as_dirname -- "$as_myself" || -$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_myself" : 'X\(//\)[^/]' \| \ - X"$as_myself" : 'X\(//\)$' \| \ - X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_myself" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - srcdir=$ac_confdir - if test ! -r "$srcdir/$ac_unique_file"; then - srcdir=.. - fi -else - ac_srcdir_defaulted=no -fi -if test ! -r "$srcdir/$ac_unique_file"; then - test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." - as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" -fi -ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" -ac_abs_confdir=`( - cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" - pwd)` -# When building in place, set srcdir=. -if test "$ac_abs_confdir" = "$ac_pwd"; then - srcdir=. -fi -# Remove unnecessary trailing slashes from srcdir. -# Double slashes in file names in object file debugging info -# mess up M-x gdb in Emacs. 
-case $srcdir in -*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; -esac -for ac_var in $ac_precious_vars; do - eval ac_env_${ac_var}_set=\${${ac_var}+set} - eval ac_env_${ac_var}_value=\$${ac_var} - eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} - eval ac_cv_env_${ac_var}_value=\$${ac_var} -done - -# -# Report the --help message. -# -if test "$ac_init_help" = "long"; then - # Omit some internal or obsolete options to make the list less imposing. - # This message is too long to be a string in the A/UX 3.1 sh. - cat <<_ACEOF -\`configure' configures consumer-verification 0.0.0 to adapt to many kinds of systems. - -Usage: $0 [OPTION]... [VAR=VALUE]... - -To assign environment variables (e.g., CC, CFLAGS...), specify them as -VAR=VALUE. See below for descriptions of some of the useful variables. - -Defaults for the options are specified in brackets. - -Configuration: - -h, --help display this help and exit - --help=short display options specific to this package - --help=recursive display the short help of all the included packages - -V, --version display version information and exit - -q, --quiet, --silent do not print \`checking ...' messages - --cache-file=FILE cache test results in FILE [disabled] - -C, --config-cache alias for \`--cache-file=config.cache' - -n, --no-create do not create output files - --srcdir=DIR find the sources in DIR [configure dir or \`..'] - -Installation directories: - --prefix=PREFIX install architecture-independent files in PREFIX - [$ac_default_prefix] - --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX - [PREFIX] - -By default, \`make install' will install all the files in -\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify -an installation prefix other than \`$ac_default_prefix' using \`--prefix', -for instance \`--prefix=\$HOME'. - -For better control, use the options below. 
-
-Fine tuning of the installation directories:
- --bindir=DIR user executables [EPREFIX/bin]
- --sbindir=DIR system admin executables [EPREFIX/sbin]
- --libexecdir=DIR program executables [EPREFIX/libexec]
- --sysconfdir=DIR read-only single-machine data [PREFIX/etc]
- --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com]
- --localstatedir=DIR modifiable single-machine data [PREFIX/var]
- --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run]
- --libdir=DIR object code libraries [EPREFIX/lib]
- --includedir=DIR C header files [PREFIX/include]
- --oldincludedir=DIR C header files for non-gcc [/usr/include]
- --datarootdir=DIR read-only arch.-independent data root [PREFIX/share]
- --datadir=DIR read-only architecture-independent data [DATAROOTDIR]
- --infodir=DIR info documentation [DATAROOTDIR/info]
- --localedir=DIR locale-dependent data [DATAROOTDIR/locale]
- --mandir=DIR man documentation [DATAROOTDIR/man]
- --docdir=DIR documentation root
- [DATAROOTDIR/doc/consumer-verification]
- --htmldir=DIR html documentation [DOCDIR]
- --dvidir=DIR dvi documentation [DOCDIR]
- --pdfdir=DIR pdf documentation [DOCDIR]
- --psdir=DIR ps documentation [DOCDIR]
-_ACEOF
-
- cat <<\_ACEOF
-
-Program names:
- --program-prefix=PREFIX prepend PREFIX to installed program names
- --program-suffix=SUFFIX append SUFFIX to installed program names
- --program-transform-name=PROGRAM run sed PROGRAM on installed program names
-_ACEOF
-fi
-
-if test -n "$ac_init_help"; then
- case $ac_init_help in
- short | recursive ) echo "Configuration of consumer-verification 0.0.0:";;
- esac
- cat <<\_ACEOF
-
-Optional Features:
- --disable-option-checking ignore unrecognized --enable/--with options
- --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no)
- --enable-FEATURE[=ARG] include FEATURE [ARG=yes]
- --enable-silent-rules less verbose build output (undo: "make V=1")
- --disable-silent-rules verbose build output (undo: "make V=0")
- --enable-dependency-tracking
- do not reject slow dependency extractors
- --disable-dependency-tracking
- speeds up one-time build
-
-Optional Packages:
- --with-PACKAGE[=ARG] use PACKAGE [ARG=yes]
- --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no)
- --with-libcurl=PREFIX look for the curl library in PREFIX/lib and headers
- in PREFIX/include
-
-Some influential environment variables:
- CC C compiler command
- CFLAGS C compiler flags
- LDFLAGS linker flags, e.g. -L<lib dir> if you have libraries in a
- nonstandard directory <lib dir>
- LIBS libraries to pass to the linker, e.g. -l<library>
- CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I<include dir> if
- you have headers in a nonstandard directory <include dir>
-
-Use these variables to override the choices made by `configure' or to help
-it to find libraries and programs with nonstandard names/locations.
-
-Report bugs to <bug-automake@gnu.org>.
-_ACEOF
-ac_status=$?
-fi
-
-if test "$ac_init_help" = "recursive"; then
- # If there are subdirs, report their specific --help.
- for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue
- test -d "$ac_dir" ||
- { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } ||
- continue
- ac_builddir=.
-
-case "$ac_dir" in
-.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
-*)
- ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
- # A ".." for each directory in $ac_dir_suffix.
- ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
- case $ac_top_builddir_sub in
- "") ac_top_builddir_sub=. 
ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - cd "$ac_dir" || { ac_status=$?; continue; } - # Check for guested configure. - if test -f "$ac_srcdir/configure.gnu"; then - echo && - $SHELL "$ac_srcdir/configure.gnu" --help=recursive - elif test -f "$ac_srcdir/configure"; then - echo && - $SHELL "$ac_srcdir/configure" --help=recursive - else - $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 - fi || ac_status=$? - cd "$ac_pwd" || { ac_status=$?; break; } - done -fi - -test -n "$ac_init_help" && exit $ac_status -if $ac_init_version; then - cat <<\_ACEOF -consumer-verification configure 0.0.0 -generated by GNU Autoconf 2.69 - -Copyright (C) 2012 Free Software Foundation, Inc. -This configure script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it. -_ACEOF - exit -fi - -## ------------------------ ## -## Autoconf initialization. ## -## ------------------------ ## - -# ac_fn_c_try_compile LINENO -# -------------------------- -# Try to compile conftest.$ac_ext, and return whether this succeeded. -ac_fn_c_try_compile () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext - if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_compile - -# ac_fn_c_try_link LINENO -# ----------------------- -# Try to link conftest.$ac_ext, and return whether this succeeded. -ac_fn_c_try_link () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext conftest$ac_exeext - if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && { - test "$cross_compiling" = yes || - test -x conftest$ac_exeext - }; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information - # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would - # interfere with the next link command; also delete a directory that is - # left behind by Apple's compiler. We do this before executing the actions. - rm -rf conftest.dSYM conftest_ipa8_conftest.oo - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_link - -# ac_fn_c_check_func LINENO FUNC VAR -# ---------------------------------- -# Tests whether FUNC exists, setting the cache variable VAR accordingly -ac_fn_c_check_func () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -/* Define $2 to an innocuous variant, in case declares $2. - For example, HP-UX 11i declares gettimeofday. */ -#define $2 innocuous_$2 - -/* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $2 (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ - -#ifdef __STDC__ -# include -#else -# include -#endif - -#undef $2 - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char $2 (); -/* The GNU C library defines this for functions which it implements - to always fail with ENOSYS. Some functions are actually named - something starting with __ and the normal name is an alias. */ -#if defined __stub_$2 || defined __stub___$2 -choke me -#endif - -int -main () -{ -return $2 (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - eval "$3=yes" -else - eval "$3=no" -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_func -cat >config.log <<_ACEOF -This file contains any messages produced by compilers while -running configure, to aid debugging if configure makes a mistake. - -It was created by consumer-verification $as_me 0.0.0, which was -generated by GNU Autoconf 2.69. Invocation command line was - - $ $0 $@ - -_ACEOF -exec 5>>config.log -{ -cat <<_ASUNAME -## --------- ## -## Platform. 
## -## --------- ## - -hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` -uname -m = `(uname -m) 2>/dev/null || echo unknown` -uname -r = `(uname -r) 2>/dev/null || echo unknown` -uname -s = `(uname -s) 2>/dev/null || echo unknown` -uname -v = `(uname -v) 2>/dev/null || echo unknown` - -/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` -/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` - -/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` -/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` -/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` -/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` -/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` -/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` -/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` - -_ASUNAME - -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - $as_echo "PATH: $as_dir" - done -IFS=$as_save_IFS - -} >&5 - -cat >&5 <<_ACEOF - - -## ----------- ## -## Core tests. ## -## ----------- ## - -_ACEOF - - -# Keep a trace of the command line. -# Strip out --no-create and --no-recursion so they do not pile up. -# Strip out --silent because we don't want to record it for future runs. -# Also quote any args containing shell meta-characters. -# Make two passes to allow for proper duplicate-argument suppression. -ac_configure_args= -ac_configure_args0= -ac_configure_args1= -ac_must_keep_next=false -for ac_pass in 1 2 -do - for ac_arg - do - case $ac_arg in - -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - continue ;; - *\'*) - ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - case $ac_pass in - 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; - 2) - as_fn_append ac_configure_args1 " '$ac_arg'" - if test $ac_must_keep_next = true; then - ac_must_keep_next=false # Got value, back to normal. - else - case $ac_arg in - *=* | --config-cache | -C | -disable-* | --disable-* \ - | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ - | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ - | -with-* | --with-* | -without-* | --without-* | --x) - case "$ac_configure_args0 " in - "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; - esac - ;; - -* ) ac_must_keep_next=true ;; - esac - fi - as_fn_append ac_configure_args " '$ac_arg'" - ;; - esac - done -done -{ ac_configure_args0=; unset ac_configure_args0;} -{ ac_configure_args1=; unset ac_configure_args1;} - -# When interrupted or exit'd, cleanup temporary files, and complete -# config.log. We remove comments because anyway the quotes in there -# would cause problems or look ugly. -# WARNING: Use '\'' to represent an apostrophe within the trap. -# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. -trap 'exit_status=$? - # Save into config.log some information that might help in debugging. - { - echo - - $as_echo "## ---------------- ## -## Cache variables. 
## -## ---------------- ##" - echo - # The following way of writing the cache mishandles newlines in values, -( - for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 -$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( - *) { eval $ac_var=; unset $ac_var;} ;; - esac ;; - esac - done - (set) 2>&1 | - case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - sed -n \ - "s/'\''/'\''\\\\'\'''\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" - ;; #( - *) - sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) - echo - - $as_echo "## ----------------- ## -## Output variables. ## -## ----------------- ##" - echo - for ac_var in $ac_subst_vars - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - $as_echo "$ac_var='\''$ac_val'\''" - done | sort - echo - - if test -n "$ac_subst_files"; then - $as_echo "## ------------------- ## -## File substitutions. ## -## ------------------- ##" - echo - for ac_var in $ac_subst_files - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - $as_echo "$ac_var='\''$ac_val'\''" - done | sort - echo - fi - - if test -s confdefs.h; then - $as_echo "## ----------- ## -## confdefs.h. ## -## ----------- ##" - echo - cat confdefs.h - echo - fi - test "$ac_signal" != 0 && - $as_echo "$as_me: caught signal $ac_signal" - $as_echo "$as_me: exit $exit_status" - } >&5 - rm -f core *.core core.conftest.* && - rm -f -r conftest* confdefs* conf$$* $ac_clean_files && - exit $exit_status -' 0 -for ac_signal in 1 2 13 15; do - trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal -done -ac_signal=0 - -# confdefs.h avoids OS command line length limits that DEFS can exceed. -rm -f -r conftest* confdefs.h - -$as_echo "/* confdefs.h */" > confdefs.h - -# Predefined preprocessor variables. - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_NAME "$PACKAGE_NAME" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_TARNAME "$PACKAGE_TARNAME" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_VERSION "$PACKAGE_VERSION" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_STRING "$PACKAGE_STRING" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_URL "$PACKAGE_URL" -_ACEOF - - -# Let the site file select an alternate cache file if it wants to. -# Prefer an explicitly selected file to automatically selected ones. -ac_site_file1=NONE -ac_site_file2=NONE -if test -n "$CONFIG_SITE"; then - # We do not want a PATH search for config.site. 
- case $CONFIG_SITE in #(( - -*) ac_site_file1=./$CONFIG_SITE;; - */*) ac_site_file1=$CONFIG_SITE;; - *) ac_site_file1=./$CONFIG_SITE;; - esac -elif test "x$prefix" != xNONE; then - ac_site_file1=$prefix/share/config.site - ac_site_file2=$prefix/etc/config.site -else - ac_site_file1=$ac_default_prefix/share/config.site - ac_site_file2=$ac_default_prefix/etc/config.site -fi -for ac_site_file in "$ac_site_file1" "$ac_site_file2" -do - test "x$ac_site_file" = xNONE && continue - if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 -$as_echo "$as_me: loading site script $ac_site_file" >&6;} - sed 's/^/| /' "$ac_site_file" >&5 - . "$ac_site_file" \ - || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "failed to load site script $ac_site_file -See \`config.log' for more details" "$LINENO" 5; } - fi -done - -if test -r "$cache_file"; then - # Some versions of bash will fail to source /dev/null (special files - # actually), so we avoid doing that. DJGPP emulates it as a regular file. - if test /dev/null != "$cache_file" && test -f "$cache_file"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 -$as_echo "$as_me: loading cache $cache_file" >&6;} - case $cache_file in - [\\/]* | ?:[\\/]* ) . "$cache_file";; - *) . "./$cache_file";; - esac - fi -else - { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 -$as_echo "$as_me: creating cache $cache_file" >&6;} - >$cache_file -fi - -# Check that the precious variables saved in the cache have kept the same -# value. -ac_cache_corrupted=false -for ac_var in $ac_precious_vars; do - eval ac_old_set=\$ac_cv_env_${ac_var}_set - eval ac_new_set=\$ac_env_${ac_var}_set - eval ac_old_val=\$ac_cv_env_${ac_var}_value - eval ac_new_val=\$ac_env_${ac_var}_value - case $ac_old_set,$ac_new_set in - set,) - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 -$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,set) - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 -$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,);; - *) - if test "x$ac_old_val" != "x$ac_new_val"; then - # differences in whitespace do not lead to failure. - ac_old_val_w=`echo x $ac_old_val` - ac_new_val_w=`echo x $ac_new_val` - if test "$ac_old_val_w" != "$ac_new_val_w"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 -$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} - ac_cache_corrupted=: - else - { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 -$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} - eval $ac_var=\$ac_old_val - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 -$as_echo "$as_me: former value: \`$ac_old_val'" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 -$as_echo "$as_me: current value: \`$ac_new_val'" >&2;} - fi;; - esac - # Pass precious variables to config.status. 
- if test "$ac_new_set" = set; then - case $ac_new_val in - *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; - *) ac_arg=$ac_var=$ac_new_val ;; - esac - case " $ac_configure_args " in - *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. - *) as_fn_append ac_configure_args " '$ac_arg'" ;; - esac - fi -done -if $ac_cache_corrupted; then - { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 -$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} - as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 -fi -## -------------------- ## -## Main body of script. ## -## -------------------- ## - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - -am__api_version='1.16' - -ac_aux_dir= -for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do - if test -f "$ac_dir/install-sh"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/install-sh -c" - break - elif test -f "$ac_dir/install.sh"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/install.sh -c" - break - elif test -f "$ac_dir/shtool"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/shtool install -c" - break - fi -done -if test -z "$ac_aux_dir"; then - as_fn_error $? "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5 -fi - -# These three variables are undocumented and unsupported, -# and are intended to be withdrawn in a future Autoconf release. -# They can cause serious problems if a builder's source tree is in a directory -# whose full name contains unusual characters. -ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. -ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. -ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. - - -# Find a good install program. We prefer a C program (faster), -# so one script is as good as another. But avoid the broken or -# incompatible versions: -# SysV /etc/install, /usr/sbin/install -# SunOS /usr/etc/install -# IRIX /sbin/install -# AIX /bin/install -# AmigaOS /C/install, which installs bootblocks on floppy discs -# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag -# AFS /usr/afsws/bin/install, which mishandles nonexistent args -# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" -# OS/2's system install, which has a completely different semantic -# ./install, which can be erroneously created by make from ./install.sh. -# Reject install programs that cannot install multiple files. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 -$as_echo_n "checking for a BSD-compatible install... " >&6; } -if test -z "$INSTALL"; then -if ${ac_cv_path_install+:} false; then : - $as_echo_n "(cached) " >&6 -else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - # Account for people who put trailing slashes in PATH elements. 
-case $as_dir/ in #(( - ./ | .// | /[cC]/* | \ - /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ - ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ - /usr/ucb/* ) ;; - *) - # OSF1 and SCO ODT 3.0 have their own names for install. - # Don't use installbsd from OSF since it installs stuff as root - # by default. - for ac_prog in ginstall scoinst install; do - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then - if test $ac_prog = install && - grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then - # AIX install. It has an incompatible calling convention. - : - elif test $ac_prog = install && - grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then - # program-specific install script used by HP pwplus--don't use. - : - else - rm -rf conftest.one conftest.two conftest.dir - echo one > conftest.one - echo two > conftest.two - mkdir conftest.dir - if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && - test -s conftest.one && test -s conftest.two && - test -s conftest.dir/conftest.one && - test -s conftest.dir/conftest.two - then - ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" - break 3 - fi - fi - fi - done - done - ;; -esac - - done -IFS=$as_save_IFS - -rm -rf conftest.one conftest.two conftest.dir - -fi - if test "${ac_cv_path_install+set}" = set; then - INSTALL=$ac_cv_path_install - else - # As a last resort, use the slow shell script. Don't cache a - # value for INSTALL within a source directory, because that will - # break other packages using the cache if that directory is - # removed, or if the value is a relative name. - INSTALL=$ac_install_sh - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 -$as_echo "$INSTALL" >&6; } - -# Use test -z because SunOS4 sh mishandles braces in ${var-val}. -# It thinks the first close brace ends the variable substitution. -test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' - -test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' - -test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 -$as_echo_n "checking whether build environment is sane... " >&6; } -# Reject unsafe characters in $srcdir or the absolute working directory -# name. Accept space and tab only in the latter. -am_lf=' -' -case `pwd` in - *[\\\"\#\$\&\'\`$am_lf]*) - as_fn_error $? "unsafe absolute working directory name" "$LINENO" 5;; -esac -case $srcdir in - *[\\\"\#\$\&\'\`$am_lf\ \ ]*) - as_fn_error $? "unsafe srcdir value: '$srcdir'" "$LINENO" 5;; -esac - -# Do 'set' in a subshell so we don't clobber the current shell's -# arguments. Must try -L first in case configure is actually a -# symlink; some systems play weird games with the mod time of symlinks -# (eg FreeBSD returns the mod time of the symlink's containing -# directory). -if ( - am_has_slept=no - for am_try in 1 2; do - echo "timestamp, slept: $am_has_slept" > conftest.file - set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` - if test "$*" = "X"; then - # -L didn't work. - set X `ls -t "$srcdir/configure" conftest.file` - fi - if test "$*" != "X $srcdir/configure conftest.file" \ - && test "$*" != "X conftest.file $srcdir/configure"; then - - # If neither matched, then we have a broken ls. This can happen - # if, for instance, CONFIG_SHELL is bash and it inherits a - # broken ls alias from the environment. 
This has actually - # happened. Such a system could not be considered "sane". - as_fn_error $? "ls -t appears to fail. Make sure there is not a broken - alias in your environment" "$LINENO" 5 - fi - if test "$2" = conftest.file || test $am_try -eq 2; then - break - fi - # Just in case. - sleep 1 - am_has_slept=yes - done - test "$2" = conftest.file - ) -then - # Ok. - : -else - as_fn_error $? "newly created file is older than distributed files! -Check your system clock" "$LINENO" 5 -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -# If we didn't sleep, we still need to ensure time stamps of config.status and -# generated files are strictly newer. -am_sleep_pid= -if grep 'slept: no' conftest.file >/dev/null 2>&1; then - ( sleep 1 ) & - am_sleep_pid=$! -fi - -rm -f conftest.file - -test "$program_prefix" != NONE && - program_transform_name="s&^&$program_prefix&;$program_transform_name" -# Use a double $ so make ignores it. -test "$program_suffix" != NONE && - program_transform_name="s&\$&$program_suffix&;$program_transform_name" -# Double any \ or $. -# By default was `s,x,x', remove it if useless. -ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' -program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` - -# Expand $ac_aux_dir to an absolute path. -am_aux_dir=`cd "$ac_aux_dir" && pwd` - -if test x"${MISSING+set}" != xset; then - MISSING="\${SHELL} '$am_aux_dir/missing'" -fi -# Use eval to expand $SHELL -if eval "$MISSING --is-lightweight"; then - am_missing_run="$MISSING " -else - am_missing_run= - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: 'missing' script is too old or missing" >&5 -$as_echo "$as_me: WARNING: 'missing' script is too old or missing" >&2;} -fi - -if test x"${install_sh+set}" != xset; then - case $am_aux_dir in - *\ * | *\ *) - install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; - *) - install_sh="\${SHELL} $am_aux_dir/install-sh" - esac -fi - -# Installed binaries are usually stripped using 'strip' when the user -# run "make install-strip". However 'strip' might not be the right -# tool to use in cross-compilation environments, therefore Automake -# will honor the 'STRIP' environment variable to overrule this program. -if test "$cross_compiling" != no; then - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. -set dummy ${ac_tool_prefix}strip; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_STRIP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$STRIP"; then - ac_cv_prog_STRIP="$STRIP" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_STRIP="${ac_tool_prefix}strip" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -STRIP=$ac_cv_prog_STRIP -if test -n "$STRIP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 -$as_echo "$STRIP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_STRIP"; then - ac_ct_STRIP=$STRIP - # Extract the first word of "strip", so it can be a program name with args. 
-set dummy strip; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_STRIP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_STRIP"; then - ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_ac_ct_STRIP="strip" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP -if test -n "$ac_ct_STRIP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 -$as_echo "$ac_ct_STRIP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_STRIP" = x; then - STRIP=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - STRIP=$ac_ct_STRIP - fi -else - STRIP="$ac_cv_prog_STRIP" -fi - -fi -INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 -$as_echo_n "checking for a thread-safe mkdir -p... " >&6; } -if test -z "$MKDIR_P"; then - if ${ac_cv_path_mkdir+:} false; then : - $as_echo_n "(cached) " >&6 -else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_prog in mkdir gmkdir; do - for ac_exec_ext in '' $ac_executable_extensions; do - as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext" || continue - case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( - 'mkdir (GNU coreutils) '* | \ - 'mkdir (coreutils) '* | \ - 'mkdir (fileutils) '4.1*) - ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext - break 3;; - esac - done - done - done -IFS=$as_save_IFS - -fi - - test -d ./--version && rmdir ./--version - if test "${ac_cv_path_mkdir+set}" = set; then - MKDIR_P="$ac_cv_path_mkdir -p" - else - # As a last resort, use the slow shell script. Don't cache a - # value for MKDIR_P within a source directory, because that will - # break other packages using the cache if that directory is - # removed, or if the value is a relative name. - MKDIR_P="$ac_install_sh -d" - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 -$as_echo "$MKDIR_P" >&6; } - -for ac_prog in gawk mawk nawk awk -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_AWK+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$AWK"; then - ac_cv_prog_AWK="$AWK" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_AWK="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -AWK=$ac_cv_prog_AWK -if test -n "$AWK"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 -$as_echo "$AWK" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$AWK" && break -done - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 -$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } -set x ${MAKE-make} -ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` -if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat >conftest.make <<\_ACEOF -SHELL = /bin/sh -all: - @echo '@@@%%%=$(MAKE)=@@@%%%' -_ACEOF -# GNU make sometimes prints "make[1]: Entering ...", which would confuse us. -case `${MAKE-make} -f conftest.make 2>/dev/null` in - *@@@%%%=?*=@@@%%%*) - eval ac_cv_prog_make_${ac_make}_set=yes;; - *) - eval ac_cv_prog_make_${ac_make}_set=no;; -esac -rm -f conftest.make -fi -if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } - SET_MAKE= -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - SET_MAKE="MAKE=${MAKE-make}" -fi - -rm -rf .tst 2>/dev/null -mkdir .tst 2>/dev/null -if test -d .tst; then - am__leading_dot=. -else - am__leading_dot=_ -fi -rmdir .tst 2>/dev/null - -# Check whether --enable-silent-rules was given. -if test "${enable_silent_rules+set}" = set; then : - enableval=$enable_silent_rules; -fi - -case $enable_silent_rules in # ((( - yes) AM_DEFAULT_VERBOSITY=0;; - no) AM_DEFAULT_VERBOSITY=1;; - *) AM_DEFAULT_VERBOSITY=1;; -esac -am_make=${MAKE-make} -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $am_make supports nested variables" >&5 -$as_echo_n "checking whether $am_make supports nested variables... " >&6; } -if ${am_cv_make_support_nested_variables+:} false; then : - $as_echo_n "(cached) " >&6 -else - if $as_echo 'TRUE=$(BAR$(V)) -BAR0=false -BAR1=true -V=1 -am__doit: - @$(TRUE) -.PHONY: am__doit' | $am_make -f - >/dev/null 2>&1; then - am_cv_make_support_nested_variables=yes -else - am_cv_make_support_nested_variables=no -fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_make_support_nested_variables" >&5 -$as_echo "$am_cv_make_support_nested_variables" >&6; } -if test $am_cv_make_support_nested_variables = yes; then - AM_V='$(V)' - AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' -else - AM_V=$AM_DEFAULT_VERBOSITY - AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY -fi -AM_BACKSLASH='\' - -if test "`cd $srcdir && pwd`" != "`pwd`"; then - # Use -I$(srcdir) only when $(srcdir) != ., so that make's output - # is not polluted with repeated "-I." - am__isrc=' -I$(srcdir)' - # test to see if srcdir already configured - if test -f $srcdir/config.status; then - as_fn_error $? "source directory already configured; run \"make distclean\" there first" "$LINENO" 5 - fi -fi - -# test whether we have cygpath -if test -z "$CYGPATH_W"; then - if (cygpath --version) >/dev/null 2>/dev/null; then - CYGPATH_W='cygpath -w' - else - CYGPATH_W=echo - fi -fi - - -# Define the identity of the package. 
- PACKAGE='consumer-verification' - VERSION='0.0.0' - - -cat >>confdefs.h <<_ACEOF -#define PACKAGE "$PACKAGE" -_ACEOF - - -cat >>confdefs.h <<_ACEOF -#define VERSION "$VERSION" -_ACEOF - -# Some tools Automake needs. - -ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} - - -AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} - - -AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} - - -AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} - - -MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} - -# For better backward compatibility. To be removed once Automake 1.9.x -# dies out for good. For more background, see: -# -# -mkdir_p='$(MKDIR_P)' - -# We need awk for the "check" target (and possibly the TAP driver). The -# system "awk" is bad on some platforms. -# Always define AMTAR for backward compatibility. Yes, it's still used -# in the wild :-( We should find a proper way to deprecate it ... -AMTAR='$${TAR-tar}' - - -# We'll loop over all known methods to create a tar archive until one works. -_am_tools='gnutar pax cpio none' - -am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -' - - - - - - -# POSIX will say in a future version that running "rm -f" with no argument -# is OK; and we want to be able to make that assumption in our Makefile -# recipes. So use an aggressive probe to check that the usage we want is -# actually supported "in the wild" to an acceptable degree. -# See automake bug#10828. -# To make any issue more visible, cause the running configure to be aborted -# by default if the 'rm' program in use doesn't match our expectations; the -# user can still override this though. -if rm -f && rm -fr && rm -rf; then : OK; else - cat >&2 <<'END' -Oops! - -Your 'rm' program seems unable to run without file operands specified -on the command line, even when the '-f' option is present. This is contrary -to the behaviour of most rm programs out there, and not conforming with -the upcoming POSIX standard: - -Please tell bug-automake@gnu.org about your system, including the value -of your $PATH and any error possibly output before this message. This -can help us improve future automake versions. - -END - if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then - echo 'Configuration will proceed anyway, since you have set the' >&2 - echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2 - echo >&2 - else - cat >&2 <<'END' -Aborting the configuration process, to ensure you take notice of the issue. - -You can download and install GNU coreutils to get an 'rm' implementation -that behaves properly: . - -If you want to complete the configuration process using your problematic -'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM -to "yes", and re-run configure. - -END - as_fn_error $? "Your 'rm' program is bad, sorry." "$LINENO" 5 - fi -fi - -#*************************************************************************** -# _ _ ____ _ -# Project ___| | | | _ \| | -# / __| | | | |_) | | -# | (__| |_| | _ <| |___ -# \___|\___/|_| \_\_____| -# -# Copyright (C) 2006, David Shaw -# -# This software is licensed as described in the file COPYING, which -# you should have received as part of this distribution. The terms -# are also available at https://curl.haxx.se/docs/copyright.html. -# -# You may opt to use, copy, modify, merge, publish, distribute and/or sell -# copies of the Software, and permit persons to whom the Software is -# furnished to do so, under the terms of the COPYING file. 
-# -# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY -# KIND, either express or implied. -# -########################################################################### -# LIBCURL_CHECK_CONFIG ([DEFAULT-ACTION], [MINIMUM-VERSION], -# [ACTION-IF-YES], [ACTION-IF-NO]) -# ---------------------------------------------------------- -# David Shaw May-09-2006 -# -# Checks for libcurl. DEFAULT-ACTION is the string yes or no to -# specify whether to default to --with-libcurl or --without-libcurl. -# If not supplied, DEFAULT-ACTION is yes. MINIMUM-VERSION is the -# minimum version of libcurl to accept. Pass the version as a regular -# version number like 7.10.1. If not supplied, any version is -# accepted. ACTION-IF-YES is a list of shell commands to run if -# libcurl was successfully found and passed the various tests. -# ACTION-IF-NO is a list of shell commands that are run otherwise. -# Note that using --without-libcurl does run ACTION-IF-NO. -# -# This macro #defines HAVE_LIBCURL if a working libcurl setup is -# found, and sets @LIBCURL@ and @LIBCURL_CPPFLAGS@ to the necessary -# values. Other useful defines are LIBCURL_FEATURE_xxx where xxx are -# the various features supported by libcurl, and LIBCURL_PROTOCOL_yyy -# where yyy are the various protocols supported by libcurl. Both xxx -# and yyy are capitalized. See the list of AH_TEMPLATEs at the top of -# the macro for the complete list of possible defines. Shell -# variables $libcurl_feature_xxx and $libcurl_protocol_yyy are also -# defined to 'yes' for those features and protocols that were found. -# Note that xxx and yyy keep the same capitalization as in the -# curl-config list (e.g. it's "HTTP" and not "http"). -# -# Users may override the detected values by doing something like: -# LIBCURL="-lcurl" LIBCURL_CPPFLAGS="-I/usr/myinclude" ./configure -# -# For the sake of sanity, this macro assumes that any libcurl that is -# found is after version 7.7.2, the first version that included the -# curl-config script. Note that it is very important for people -# packaging binary versions of libcurl to include this script! -# Without curl-config, we can only guess what protocols are available, -# or use curl_version_info to figure it out at runtime. - - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. -set dummy ${ac_tool_prefix}gcc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_CC="${ac_tool_prefix}gcc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_CC"; then - ac_ct_CC=$CC - # Extract the first word of "gcc", so it can be a program name with args. -set dummy gcc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_CC"; then - ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_ac_ct_CC="gcc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_CC=$ac_cv_prog_ac_ct_CC -if test -n "$ac_ct_CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 -$as_echo "$ac_ct_CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_CC" = x; then - CC="" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - CC=$ac_ct_CC - fi -else - CC="$ac_cv_prog_CC" -fi - -if test -z "$CC"; then - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. -set dummy ${ac_tool_prefix}cc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_CC="${ac_tool_prefix}cc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - fi -fi -if test -z "$CC"; then - # Extract the first word of "cc", so it can be a program name with args. -set dummy cc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. 
-else - ac_prog_rejected=no -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then - ac_prog_rejected=yes - continue - fi - ac_cv_prog_CC="cc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -if test $ac_prog_rejected = yes; then - # We found a bogon in the path, so make sure we never use it. - set dummy $ac_cv_prog_CC - shift - if test $# != 0; then - # We chose a different compiler from the bogus one. - # However, it has the same basename, so the bogon will be chosen - # first if we set CC to just the basename; use the full file name. - shift - ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" - fi -fi -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$CC"; then - if test -n "$ac_tool_prefix"; then - for ac_prog in cl.exe - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_CC="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$CC" && break - done -fi -if test -z "$CC"; then - ac_ct_CC=$CC - for ac_prog in cl.exe -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_CC"; then - ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_ac_ct_CC="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_CC=$ac_cv_prog_ac_ct_CC -if test -n "$ac_ct_CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 -$as_echo "$ac_ct_CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_CC" && break -done - - if test "x$ac_ct_CC" = x; then - CC="" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - CC=$ac_ct_CC - fi -fi - -fi - - -test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "no acceptable C compiler found in \$PATH -See \`config.log' for more details" "$LINENO" 5; } - -# Provide some information about the compiler. -$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 -set X $ac_compile -ac_compiler=$2 -for ac_option in --version -v -V -qversion; do - { { ac_try="$ac_compiler $ac_option >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compiler $ac_option >&5") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - sed '10a\ -... rest of stderr output deleted ... - 10q' conftest.err >conftest.er1 - cat conftest.er1 >&5 - fi - rm -f conftest.er1 conftest.err - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -done - -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -ac_clean_files_save=$ac_clean_files -ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" -# Try to create an executable without -o first, disregard a.out. -# It will help us diagnose broken compilers, and finding out an intuition -# of exeext. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 -$as_echo_n "checking whether the C compiler works... " >&6; } -ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` - -# The possible output files: -ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" - -ac_rmfiles= -for ac_file in $ac_files -do - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; - * ) ac_rmfiles="$ac_rmfiles $ac_file";; - esac -done -rm -f $ac_rmfiles - -if { { ac_try="$ac_link_default" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link_default") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. -# So ignore a value of `no', otherwise this would lead to `EXEEXT = no' -# in a Makefile. 
We should not override ac_cv_exeext if it was cached, -# so that the user can short-circuit this test for compilers unknown to -# Autoconf. -for ac_file in $ac_files '' -do - test -f "$ac_file" || continue - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) - ;; - [ab].out ) - # We found the default executable, but exeext='' is most - # certainly right. - break;; - *.* ) - if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; - then :; else - ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` - fi - # We set ac_cv_exeext here because the later test for it is not - # safe: cross compilers may not add the suffix if given an `-o' - # argument, so we may need to know it at that point already. - # Even if this section looks crufty: it has the advantage of - # actually working. - break;; - * ) - break;; - esac -done -test "$ac_cv_exeext" = no && ac_cv_exeext= - -else - ac_file='' -fi -if test -z "$ac_file"; then : - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -$as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error 77 "C compiler cannot create executables -See \`config.log' for more details" "$LINENO" 5; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 -$as_echo_n "checking for C compiler default output file name... " >&6; } -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 -$as_echo "$ac_file" >&6; } -ac_exeext=$ac_cv_exeext - -rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out -ac_clean_files=$ac_clean_files_save -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 -$as_echo_n "checking for suffix of executables... " >&6; } -if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - # If both `conftest.exe' and `conftest' are `present' (well, observable) -# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will -# work properly (i.e., refer to `conftest.exe'), while it won't with -# `rm'. -for ac_file in conftest.exe conftest conftest.*; do - test -f "$ac_file" || continue - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; - *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` - break;; - * ) break;; - esac -done -else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot compute suffix of executables: cannot compile and link -See \`config.log' for more details" "$LINENO" 5; } -fi -rm -f conftest conftest$ac_cv_exeext -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 -$as_echo "$ac_cv_exeext" >&6; } - -rm -f conftest.$ac_ext -EXEEXT=$ac_cv_exeext -ac_exeext=$EXEEXT -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -#include -int -main () -{ -FILE *f = fopen ("conftest.out", "w"); - return ferror (f) || fclose (f) != 0; - - ; - return 0; -} -_ACEOF -ac_clean_files="$ac_clean_files conftest.out" -# Check that the compiler produces executables we can run. If not, either -# the compiler is broken, or we cross compile. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 -$as_echo_n "checking whether we are cross compiling... " >&6; } -if test "$cross_compiling" != yes; then - { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } - if { ac_try='./conftest$ac_cv_exeext' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then - cross_compiling=no - else - if test "$cross_compiling" = maybe; then - cross_compiling=yes - else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot run C compiled programs. -If you meant to cross compile, use \`--host'. -See \`config.log' for more details" "$LINENO" 5; } - fi - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 -$as_echo "$cross_compiling" >&6; } - -rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out -ac_clean_files=$ac_clean_files_save -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 -$as_echo_n "checking for suffix of object files... " >&6; } -if ${ac_cv_objext+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -rm -f conftest.o conftest.obj -if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - for ac_file in conftest.o conftest.obj conftest.*; do - test -f "$ac_file" || continue; - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; - *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` - break;; - esac -done -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot compute suffix of object files: cannot compile -See \`config.log' for more details" "$LINENO" 5; } -fi -rm -f conftest.$ac_cv_objext conftest.$ac_ext -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 -$as_echo "$ac_cv_objext" >&6; } -OBJEXT=$ac_cv_objext -ac_objext=$OBJEXT -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 -$as_echo_n "checking whether we are using the GNU C compiler... 
" >&6; } -if ${ac_cv_c_compiler_gnu+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ -#ifndef __GNUC__ - choke me -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_compiler_gnu=yes -else - ac_compiler_gnu=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -ac_cv_c_compiler_gnu=$ac_compiler_gnu - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 -$as_echo "$ac_cv_c_compiler_gnu" >&6; } -if test $ac_compiler_gnu = yes; then - GCC=yes -else - GCC= -fi -ac_test_CFLAGS=${CFLAGS+set} -ac_save_CFLAGS=$CFLAGS -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 -$as_echo_n "checking whether $CC accepts -g... " >&6; } -if ${ac_cv_prog_cc_g+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_save_c_werror_flag=$ac_c_werror_flag - ac_c_werror_flag=yes - ac_cv_prog_cc_g=no - CFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_g=yes -else - CFLAGS="" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - -else - ac_c_werror_flag=$ac_save_c_werror_flag - CFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_g=yes -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - ac_c_werror_flag=$ac_save_c_werror_flag -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 -$as_echo "$ac_cv_prog_cc_g" >&6; } -if test "$ac_test_CFLAGS" = set; then - CFLAGS=$ac_save_CFLAGS -elif test $ac_cv_prog_cc_g = yes; then - if test "$GCC" = yes; then - CFLAGS="-g -O2" - else - CFLAGS="-g" - fi -else - if test "$GCC" = yes; then - CFLAGS="-O2" - else - CFLAGS= - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 -$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } -if ${ac_cv_prog_cc_c89+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_cv_prog_cc_c89=no -ac_save_CC=$CC -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -struct stat; -/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ -struct buf { int x; }; -FILE * (*rcsopen) (struct buf *, struct stat *, int); -static char *e (p, i) - char **p; - int i; -{ - return p[i]; -} -static char *f (char * (*g) (char **, int), char **p, ...) -{ - char *s; - va_list v; - va_start (v,p); - s = g (p, va_arg (v,int)); - va_end (v); - return s; -} - -/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has - function prototypes and stuff, but not '\xHH' hex character constants. - These don't provoke an error unfortunately, instead are silently treated - as 'x'. The following induces an error, until -std is added to get - proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an - array size at least. It's necessary to write '\x00'==0 to get something - that's true only with -std. */ -int osf4_cc_array ['\x00' == 0 ? 
1 : -1]; - -/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters - inside strings and character constants. */ -#define FOO(x) 'x' -int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1]; - -int test (int i, double x); -struct s1 {int (*f) (int a);}; -struct s2 {int (*f) (double a);}; -int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); -int argc; -char **argv; -int -main () -{ -return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; - ; - return 0; -} -_ACEOF -for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ - -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" -do - CC="$ac_save_CC $ac_arg" - if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_c89=$ac_arg -fi -rm -f core conftest.err conftest.$ac_objext - test "x$ac_cv_prog_cc_c89" != "xno" && break -done -rm -f conftest.$ac_ext -CC=$ac_save_CC - -fi -# AC_CACHE_VAL -case "x$ac_cv_prog_cc_c89" in - x) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 -$as_echo "none needed" >&6; } ;; - xno) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 -$as_echo "unsupported" >&6; } ;; - *) - CC="$CC $ac_cv_prog_cc_c89" - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 -$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; -esac -if test "x$ac_cv_prog_cc_c89" != xno; then : - -fi - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5 -$as_echo_n "checking whether $CC understands -c and -o together... " >&6; } -if ${am_cv_prog_cc_c_o+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF - # Make sure it works both with $CC and with simple cc. - # Following AC_PROG_CC_C_O, we do the test twice because some - # compilers refuse to overwrite an existing .o file with -o, - # though they will create one. - am_cv_prog_cc_c_o=yes - for am_i in 1 2; do - if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5 - ($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } \ - && test -f conftest2.$ac_objext; then - : OK - else - am_cv_prog_cc_c_o=no - break - fi - done - rm -f core conftest* - unset am_i -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5 -$as_echo "$am_cv_prog_cc_c_o" >&6; } -if test "$am_cv_prog_cc_c_o" != yes; then - # Losing compiler, so override with the script. - # FIXME: It is wrong to rewrite CC. - # But if we don't then we get into trouble of one sort or another. 
- # A longer-term fix would be to have automake use am__CC in this case, - # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" - CC="$am_aux_dir/compile $CC" -fi -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - -DEPDIR="${am__leading_dot}deps" - -ac_config_commands="$ac_config_commands depfiles" - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} supports the include directive" >&5 -$as_echo_n "checking whether ${MAKE-make} supports the include directive... " >&6; } -cat > confinc.mk << 'END' -am__doit: - @echo this is the am__doit target >confinc.out -.PHONY: am__doit -END -am__include="#" -am__quote= -# BSD make does it like this. -echo '.include "confinc.mk" # ignored' > confmf.BSD -# Other make implementations (GNU, Solaris 10, AIX) do it like this. -echo 'include confinc.mk # ignored' > confmf.GNU -_am_result=no -for s in GNU BSD; do - { echo "$as_me:$LINENO: ${MAKE-make} -f confmf.$s && cat confinc.out" >&5 - (${MAKE-make} -f confmf.$s && cat confinc.out) >&5 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } - case $?:`cat confinc.out 2>/dev/null` in #( - '0:this is the am__doit target') : - case $s in #( - BSD) : - am__include='.include' am__quote='"' ;; #( - *) : - am__include='include' am__quote='' ;; -esac ;; #( - *) : - ;; -esac - if test "$am__include" != "#"; then - _am_result="yes ($s style)" - break - fi -done -rm -f confinc.* confmf.* -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: ${_am_result}" >&5 -$as_echo "${_am_result}" >&6; } - -# Check whether --enable-dependency-tracking was given. -if test "${enable_dependency_tracking+set}" = set; then : - enableval=$enable_dependency_tracking; -fi - -if test "x$enable_dependency_tracking" != xno; then - am_depcomp="$ac_aux_dir/depcomp" - AMDEPBACKSLASH='\' - am__nodep='_no' -fi - if test "x$enable_dependency_tracking" != xno; then - AMDEP_TRUE= - AMDEP_FALSE='#' -else - AMDEP_TRUE='#' - AMDEP_FALSE= -fi - - - -depcc="$CC" am_compiler_list= - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 -$as_echo_n "checking dependency style of $depcc... " >&6; } -if ${am_cv_CC_dependencies_compiler_type+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then - # We make a subdir and do the tests there. Otherwise we can end up - # making bogus files that we don't know about and never remove. For - # instance it was reported that on HP-UX the gcc test will end up - # making a dummy file named 'D' -- because '-MD' means "put the output - # in D". - rm -rf conftest.dir - mkdir conftest.dir - # Copy depcomp to subdir because otherwise we won't find it if we're - # using a relative directory. - cp "$am_depcomp" conftest.dir - cd conftest.dir - # We will build objects and dependencies in a subdirectory because - # it helps to detect inapplicable dependency modes. For instance - # both Tru64's cc and ICC support -MD to output dependencies as a - # side effect of compilation, but ICC will put the dependencies in - # the current directory while Tru64 will put them in the object - # directory. 
- mkdir sub - - am_cv_CC_dependencies_compiler_type=none - if test "$am_compiler_list" = ""; then - am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` - fi - am__universal=false - case " $depcc " in #( - *\ -arch\ *\ -arch\ *) am__universal=true ;; - esac - - for depmode in $am_compiler_list; do - # Setup a source with many dependencies, because some compilers - # like to wrap large dependency lists on column 80 (with \), and - # we should not choose a depcomp mode which is confused by this. - # - # We need to recreate these files for each test, as the compiler may - # overwrite some of them when testing with obscure command lines. - # This happens at least with the AIX C compiler. - : > sub/conftest.c - for i in 1 2 3 4 5 6; do - echo '#include "conftst'$i'.h"' >> sub/conftest.c - # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with - # Solaris 10 /bin/sh. - echo '/* dummy */' > sub/conftst$i.h - done - echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf - - # We check with '-c' and '-o' for the sake of the "dashmstdout" - # mode. It turns out that the SunPro C++ compiler does not properly - # handle '-M -o', and we need to detect this. Also, some Intel - # versions had trouble with output in subdirs. - am__obj=sub/conftest.${OBJEXT-o} - am__minus_obj="-o $am__obj" - case $depmode in - gcc) - # This depmode causes a compiler race in universal mode. - test "$am__universal" = false || continue - ;; - nosideeffect) - # After this tag, mechanisms are not by side-effect, so they'll - # only be used when explicitly requested. - if test "x$enable_dependency_tracking" = xyes; then - continue - else - break - fi - ;; - msvc7 | msvc7msys | msvisualcpp | msvcmsys) - # This compiler won't grok '-c -o', but also, the minuso test has - # not run yet. These depmodes are late enough in the game, and - # so weak that their functioning should not be impacted. - am__obj=conftest.${OBJEXT-o} - am__minus_obj= - ;; - none) break ;; - esac - if depmode=$depmode \ - source=sub/conftest.c object=$am__obj \ - depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ - $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ - >/dev/null 2>conftest.err && - grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && - grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && - grep $am__obj sub/conftest.Po > /dev/null 2>&1 && - ${MAKE-make} -s -f confmf > /dev/null 2>&1; then - # icc doesn't choke on unknown options, it will just issue warnings - # or remarks (even with -Werror). So we grep stderr for any message - # that says an option was ignored or not supported. - # When given -MP, icc 7.0 and 7.1 complain thusly: - # icc: Command line warning: ignoring option '-M'; no argument required - # The diagnosis changed in icc 8.0: - # icc: Command line remark: option '-MP' not supported - if (grep 'ignoring option' conftest.err || - grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else - am_cv_CC_dependencies_compiler_type=$depmode - break - fi - fi - done - - cd .. 
- rm -rf conftest.dir -else - am_cv_CC_dependencies_compiler_type=none -fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 -$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } -CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type - - if - test "x$enable_dependency_tracking" != xno \ - && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then - am__fastdepCC_TRUE= - am__fastdepCC_FALSE='#' -else - am__fastdepCC_TRUE='#' - am__fastdepCC_FALSE= -fi - - - - - - - - - - - - - - - - - - - - - - - - - - - - -# Check whether --with-libcurl was given. -if test "${with_libcurl+set}" = set; then : - withval=$with_libcurl; _libcurl_with=$withval -else - _libcurl_with=yes -fi - - - if test "$_libcurl_with" != "no" ; then - - for ac_prog in gawk mawk nawk awk -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_AWK+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$AWK"; then - ac_cv_prog_AWK="$AWK" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_AWK="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -AWK=$ac_cv_prog_AWK -if test -n "$AWK"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 -$as_echo "$AWK" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$AWK" && break -done - - - _libcurl_version_parse="eval $AWK '{split(\$NF,A,\".\"); X=256*256*A[1]+256*A[2]+A[3]; print X;}'" - - _libcurl_try_link=yes - - if test -d "$_libcurl_with" ; then - LIBCURL_CPPFLAGS="-I$withval/include" - _libcurl_ldflags="-L$withval/lib" - # Extract the first word of "curl-config", so it can be a program name with args. -set dummy curl-config; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_path__libcurl_config+:} false; then : - $as_echo_n "(cached) " >&6 -else - case $_libcurl_config in - [\\/]* | ?:[\\/]*) - ac_cv_path__libcurl_config="$_libcurl_config" # Let the user override the test with a path. - ;; - *) - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in "$withval/bin" -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_path__libcurl_config="$as_dir/$ac_word$ac_exec_ext" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - - ;; -esac -fi -_libcurl_config=$ac_cv_path__libcurl_config -if test -n "$_libcurl_config"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $_libcurl_config" >&5 -$as_echo "$_libcurl_config" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - else - # Extract the first word of "curl-config", so it can be a program name with args. -set dummy curl-config; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... 
" >&6; } -if ${ac_cv_path__libcurl_config+:} false; then : - $as_echo_n "(cached) " >&6 -else - case $_libcurl_config in - [\\/]* | ?:[\\/]*) - ac_cv_path__libcurl_config="$_libcurl_config" # Let the user override the test with a path. - ;; - *) - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_path__libcurl_config="$as_dir/$ac_word$ac_exec_ext" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - - ;; -esac -fi -_libcurl_config=$ac_cv_path__libcurl_config -if test -n "$_libcurl_config"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $_libcurl_config" >&5 -$as_echo "$_libcurl_config" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - fi - - if test x$_libcurl_config != "x" ; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for the version of libcurl" >&5 -$as_echo_n "checking for the version of libcurl... " >&6; } -if ${libcurl_cv_lib_curl_version+:} false; then : - $as_echo_n "(cached) " >&6 -else - libcurl_cv_lib_curl_version=`$_libcurl_config --version | $AWK '{print $2}'` -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libcurl_cv_lib_curl_version" >&5 -$as_echo "$libcurl_cv_lib_curl_version" >&6; } - - _libcurl_version=`echo $libcurl_cv_lib_curl_version | $_libcurl_version_parse` - _libcurl_wanted=`echo 0 | $_libcurl_version_parse` - - if test $_libcurl_wanted -gt 0 ; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libcurl >= version " >&5 -$as_echo_n "checking for libcurl >= version ... " >&6; } -if ${libcurl_cv_lib_version_ok+:} false; then : - $as_echo_n "(cached) " >&6 -else - - if test $_libcurl_version -ge $_libcurl_wanted ; then - libcurl_cv_lib_version_ok=yes - else - libcurl_cv_lib_version_ok=no - fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libcurl_cv_lib_version_ok" >&5 -$as_echo "$libcurl_cv_lib_version_ok" >&6; } - fi - - if test $_libcurl_wanted -eq 0 || test x$libcurl_cv_lib_version_ok = xyes ; then - if test x"$LIBCURL_CPPFLAGS" = "x" ; then - LIBCURL_CPPFLAGS=`$_libcurl_config --cflags` - fi - if test x"$LIBCURL" = "x" ; then - LIBCURL=`$_libcurl_config --libs` - - # This is so silly, but Apple actually has a bug in their - # curl-config script. Fixed in Tiger, but there are still - # lots of Panther installs around. - case "${host}" in - powerpc-apple-darwin7*) - LIBCURL=`echo $LIBCURL | sed -e 's|-arch i386||g'` - ;; - esac - fi - - # All curl-config scripts support --feature - _libcurl_features=`$_libcurl_config --feature` - - # Is it modern enough to have --protocols? (7.12.4) - if test $_libcurl_version -ge 461828 ; then - _libcurl_protocols=`$_libcurl_config --protocols` - fi - else - _libcurl_try_link=no - fi - - unset _libcurl_wanted - fi - - if test $_libcurl_try_link = yes ; then - - # we didn't find curl-config, so let's see if the user-supplied - # link line (or failing that, "-lcurl") is enough. - LIBCURL=${LIBCURL-"$_libcurl_ldflags -lcurl"} - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether libcurl is usable" >&5 -$as_echo_n "checking whether libcurl is usable... 
" >&6; } -if ${libcurl_cv_lib_curl_usable+:} false; then : - $as_echo_n "(cached) " >&6 -else - - _libcurl_save_cppflags=$CPPFLAGS - CPPFLAGS="$LIBCURL_CPPFLAGS $CPPFLAGS" - _libcurl_save_libs=$LIBS - LIBS="$LIBCURL $LIBS" - - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -int -main () -{ - -/* Try and use a few common options to force a failure if we are - missing symbols or can't link. */ -int x; -curl_easy_setopt(NULL,CURLOPT_URL,NULL); -x=CURL_ERROR_SIZE; -x=CURLOPT_WRITEFUNCTION; -x=CURLOPT_WRITEDATA; -x=CURLOPT_ERRORBUFFER; -x=CURLOPT_STDERR; -x=CURLOPT_VERBOSE; -if (x) {;} - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - libcurl_cv_lib_curl_usable=yes -else - libcurl_cv_lib_curl_usable=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - - CPPFLAGS=$_libcurl_save_cppflags - LIBS=$_libcurl_save_libs - unset _libcurl_save_cppflags - unset _libcurl_save_libs - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libcurl_cv_lib_curl_usable" >&5 -$as_echo "$libcurl_cv_lib_curl_usable" >&6; } - - if test $libcurl_cv_lib_curl_usable = yes ; then - - # Does curl_free() exist in this version of libcurl? - # If not, fake it with free() - - _libcurl_save_cppflags=$CPPFLAGS - CPPFLAGS="$CPPFLAGS $LIBCURL_CPPFLAGS" - _libcurl_save_libs=$LIBS - LIBS="$LIBS $LIBCURL" - - ac_fn_c_check_func "$LINENO" "curl_free" "ac_cv_func_curl_free" -if test "x$ac_cv_func_curl_free" = xyes; then : - -else - -$as_echo "#define curl_free free" >>confdefs.h - -fi - - - CPPFLAGS=$_libcurl_save_cppflags - LIBS=$_libcurl_save_libs - unset _libcurl_save_cppflags - unset _libcurl_save_libs - - -$as_echo "#define HAVE_LIBCURL 1" >>confdefs.h - - - - - for _libcurl_feature in $_libcurl_features ; do - cat >>confdefs.h <<_ACEOF -#define `$as_echo "libcurl_feature_$_libcurl_feature" | $as_tr_cpp` 1 -_ACEOF - - eval `$as_echo "libcurl_feature_$_libcurl_feature" | $as_tr_sh`=yes - done - - if test "x$_libcurl_protocols" = "x" ; then - - # We don't have --protocols, so just assume that all - # protocols are available - _libcurl_protocols="HTTP FTP FILE TELNET LDAP DICT TFTP" - - if test x$libcurl_feature_SSL = xyes ; then - _libcurl_protocols="$_libcurl_protocols HTTPS" - - # FTPS wasn't standards-compliant until version - # 7.11.0 (0x070b00 == 461568) - if test $_libcurl_version -ge 461568; then - _libcurl_protocols="$_libcurl_protocols FTPS" - fi - fi - - # RTSP, IMAP, POP3 and SMTP were added in - # 7.20.0 (0x071400 == 463872) - if test $_libcurl_version -ge 463872; then - _libcurl_protocols="$_libcurl_protocols RTSP IMAP POP3 SMTP" - fi - fi - - for _libcurl_protocol in $_libcurl_protocols ; do - cat >>confdefs.h <<_ACEOF -#define `$as_echo "libcurl_protocol_$_libcurl_protocol" | $as_tr_cpp` 1 -_ACEOF - - eval `$as_echo "libcurl_protocol_$_libcurl_protocol" | $as_tr_sh`=yes - done - else - unset LIBCURL - unset LIBCURL_CPPFLAGS - fi - fi - - unset _libcurl_try_link - unset _libcurl_version_parse - unset _libcurl_config - unset _libcurl_feature - unset _libcurl_features - unset _libcurl_protocol - unset _libcurl_protocols - unset _libcurl_version - unset _libcurl_ldflags - fi - - if test x$_libcurl_with = xno || test x$libcurl_cv_lib_curl_usable != xyes ; then - # This is the IF-NO path - as_fn_error $? 
"libcurl development files required" "$LINENO" 5 - else - # This is the IF-YES path - : - fi - - unset _libcurl_with - -ac_config_headers="$ac_config_headers config.h" - -ac_config_files="$ac_config_files Makefile src/Makefile" - -cat >confcache <<\_ACEOF -# This file is a shell script that caches the results of configure -# tests run on this system so they can be shared between configure -# scripts and configure runs, see configure's option --config-cache. -# It is not useful on other systems. If it contains results you don't -# want to keep, you may remove or edit it. -# -# config.status only pays attention to the cache file if you give it -# the --recheck option to rerun configure. -# -# `ac_cv_env_foo' variables (set or unset) will be overridden when -# loading this file, other *unset* `ac_cv_foo' will be assigned the -# following values. - -_ACEOF - -# The following way of writing the cache mishandles newlines in values, -# but we know of no workaround that is simple, portable, and efficient. -# So, we kill variables containing newlines. -# Ultrix sh set writes to stderr and can't be redirected directly, -# and sets the high bit in the cache file unless we assign to the vars. -( - for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 -$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( - *) { eval $ac_var=; unset $ac_var;} ;; - esac ;; - esac - done - - (set) 2>&1 | - case $as_nl`(ac_space=' '; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - # `set' does not quote correctly, so add quotes: double-quote - # substitution turns \\\\ into \\, and sed turns \\ into \. - sed -n \ - "s/'/'\\\\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" - ;; #( - *) - # `set' quotes correctly as required by POSIX, so do not add quotes. - sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) | - sed ' - /^ac_cv_env_/b end - t clear - :clear - s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ - t end - s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ - :end' >>confcache -if diff "$cache_file" confcache >/dev/null 2>&1; then :; else - if test -w "$cache_file"; then - if test "x$cache_file" != "x/dev/null"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 -$as_echo "$as_me: updating cache $cache_file" >&6;} - if test ! -f "$cache_file" || test -h "$cache_file"; then - cat confcache >"$cache_file" - else - case $cache_file in #( - */* | ?:*) - mv -f confcache "$cache_file"$$ && - mv -f "$cache_file"$$ "$cache_file" ;; #( - *) - mv -f confcache "$cache_file" ;; - esac - fi - fi - else - { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 -$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} - fi -fi -rm -f confcache - -test "x$prefix" = xNONE && prefix=$ac_default_prefix -# Let make expand exec_prefix. -test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' - -DEFS=-DHAVE_CONFIG_H - -ac_libobjs= -ac_ltlibobjs= -U= -for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue - # 1. Remove the extension, and $U if already installed. - ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' - ac_i=`$as_echo "$ac_i" | sed "$ac_script"` - # 2. Prepend LIBOBJDIR. 
When used with automake>=1.10 LIBOBJDIR - # will be set to the directory where LIBOBJS objects are built. - as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" - as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' -done -LIBOBJS=$ac_libobjs - -LTLIBOBJS=$ac_ltlibobjs - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking that generated files are newer than configure" >&5 -$as_echo_n "checking that generated files are newer than configure... " >&6; } - if test -n "$am_sleep_pid"; then - # Hide warnings about reused PIDs. - wait $am_sleep_pid 2>/dev/null - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: done" >&5 -$as_echo "done" >&6; } - if test -n "$EXEEXT"; then - am__EXEEXT_TRUE= - am__EXEEXT_FALSE='#' -else - am__EXEEXT_TRUE='#' - am__EXEEXT_FALSE= -fi - -if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then - as_fn_error $? "conditional \"AMDEP\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi -if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then - as_fn_error $? "conditional \"am__fastdepCC\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi - -: "${CONFIG_STATUS=./config.status}" -ac_write_fail=0 -ac_clean_files_save=$ac_clean_files -ac_clean_files="$ac_clean_files $CONFIG_STATUS" -{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 -$as_echo "$as_me: creating $CONFIG_STATUS" >&6;} -as_write_fail=0 -cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 -#! $SHELL -# Generated by $as_me. -# Run this file to recreate the current configuration. -# Compiler output produced by configure, useful for debugging -# configure, is in config.log if it exists. - -debug=false -ac_cs_recheck=false -ac_cs_silent=false - -SHELL=\${CONFIG_SHELL-$SHELL} -export SHELL -_ASEOF -cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 -## -------------------- ## -## M4sh Initialization. ## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - -as_nl=' -' -export as_nl -# Printing a long string crashes Solaris 7 /usr/bin/printf. -as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo -# Prefer a ksh shell builtin over an external printf program on Solaris, -# but without wasting forks for bash or zsh. 
-if test -z "$BASH_VERSION$ZSH_VERSION" \ - && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='print -r --' - as_echo_n='print -rn --' -elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='printf %s\n' - as_echo_n='printf %s' -else - if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then - as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' - as_echo_n='/usr/ucb/echo -n' - else - as_echo_body='eval expr "X$1" : "X\\(.*\\)"' - as_echo_n_body='eval - arg=$1; - case $arg in #( - *"$as_nl"*) - expr "X$arg" : "X\\(.*\\)$as_nl"; - arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; - esac; - expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" - ' - export as_echo_n_body - as_echo_n='sh -c $as_echo_n_body as_echo' - fi - export as_echo_body - as_echo='sh -c $as_echo_body as_echo' -fi - -# The user is always right. -if test "${PATH_SEPARATOR+set}" != set; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# IFS -# We need space, tab and new line, in precisely that order. Quoting is -# there to prevent editors from complaining about space-tab. -# (If _AS_PATH_WALK were called with IFS unset, it would disable word -# splitting by setting IFS to empty value.) -IFS=" "" $as_nl" - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! -f "$as_myself"; then - $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - -# Unset variables that we do not need and which cause bugs (e.g. in -# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" -# suppresses any "Segmentation fault" message there. '((' could -# trigger a bug in pdksh 5.2.14. -for as_var in BASH_ENV ENV MAIL MAILPATH -do eval test x\${$as_var+set} = xset \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done -PS1='$ ' -PS2='> ' -PS4='+ ' - -# NLS nuisances. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# CDPATH. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. Then exit the -# script with STATUS, using 1 if that was 0. -as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - $as_echo "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. -as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. 
-as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit - -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. -if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. -if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else - as_fn_arith () - { - as_val=`expr "$@" || test $? -eq 1` - } -fi # as_fn_arith - - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X/"$0" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - -# Avoid depending upon Character Ranges. -as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in #((((( --n*) - case `echo 'xy\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. - xy) ECHO_C='\c';; - *) echo `echo ksh88 bug on AIX 6.1` > /dev/null - ECHO_T=' ';; - esac;; -*) - ECHO_N='-n';; -esac - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir 2>/dev/null -fi -if (echo >conf$$.file) 2>/dev/null; then - if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -pR'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -pR' - elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln - else - as_ln_s='cp -pR' - fi -else - as_ln_s='cp -pR' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. 
-as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p -if mkdir -p . 2>/dev/null; then - as_mkdir_p='mkdir -p "$as_dir"' -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - - -# as_fn_executable_p FILE -# ----------------------- -# Test if FILE is an executable regular file. -as_fn_executable_p () -{ - test -f "$1" && test -x "$1" -} # as_fn_executable_p -as_test_x='test -x' -as_executable_p=as_fn_executable_p - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. -as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - -exec 6>&1 -## ----------------------------------- ## -## Main body of $CONFIG_STATUS script. ## -## ----------------------------------- ## -_ASEOF -test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# Save the log message, to keep $0 and so on meaningful, and to -# report actual input values of CONFIG_FILES etc. instead of their -# values after options handling. -ac_log=" -This file was extended by consumer-verification $as_me 0.0.0, which was -generated by GNU Autoconf 2.69. Invocation command line was - - CONFIG_FILES = $CONFIG_FILES - CONFIG_HEADERS = $CONFIG_HEADERS - CONFIG_LINKS = $CONFIG_LINKS - CONFIG_COMMANDS = $CONFIG_COMMANDS - $ $0 $@ - -on `(hostname || uname -n) 2>/dev/null | sed 1q` -" - -_ACEOF - -case $ac_config_files in *" -"*) set x $ac_config_files; shift; ac_config_files=$*;; -esac - -case $ac_config_headers in *" -"*) set x $ac_config_headers; shift; ac_config_headers=$*;; -esac - - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -# Files that config.status was made for. -config_files="$ac_config_files" -config_headers="$ac_config_headers" -config_commands="$ac_config_commands" - -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -ac_cs_usage="\ -\`$as_me' instantiates files and other configuration actions -from templates according to the current configuration. Unless the files -and actions are specified as TAGs, all are instantiated by default. - -Usage: $0 [OPTION]... [TAG]... 
- - -h, --help print this help, then exit - -V, --version print version number and configuration settings, then exit - --config print configuration, then exit - -q, --quiet, --silent - do not print progress messages - -d, --debug don't remove temporary files - --recheck update $as_me by reconfiguring in the same conditions - --file=FILE[:TEMPLATE] - instantiate the configuration file FILE - --header=FILE[:TEMPLATE] - instantiate the configuration header FILE - -Configuration files: -$config_files - -Configuration headers: -$config_headers - -Configuration commands: -$config_commands - -Report bugs to ." - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" -ac_cs_version="\\ -consumer-verification config.status 0.0.0 -configured by $0, generated by GNU Autoconf 2.69, - with options \\"\$ac_cs_config\\" - -Copyright (C) 2012 Free Software Foundation, Inc. -This config.status script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it." - -ac_pwd='$ac_pwd' -srcdir='$srcdir' -INSTALL='$INSTALL' -MKDIR_P='$MKDIR_P' -AWK='$AWK' -test -n "\$AWK" || AWK=awk -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# The default lists apply if the user does not specify any file. -ac_need_defaults=: -while test $# != 0 -do - case $1 in - --*=?*) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` - ac_shift=: - ;; - --*=) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg= - ac_shift=: - ;; - *) - ac_option=$1 - ac_optarg=$2 - ac_shift=shift - ;; - esac - - case $ac_option in - # Handling of the options. - -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) - ac_cs_recheck=: ;; - --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) - $as_echo "$ac_cs_version"; exit ;; - --config | --confi | --conf | --con | --co | --c ) - $as_echo "$ac_cs_config"; exit ;; - --debug | --debu | --deb | --de | --d | -d ) - debug=: ;; - --file | --fil | --fi | --f ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - '') as_fn_error $? "missing file argument" ;; - esac - as_fn_append CONFIG_FILES " '$ac_optarg'" - ac_need_defaults=false;; - --header | --heade | --head | --hea ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - as_fn_append CONFIG_HEADERS " '$ac_optarg'" - ac_need_defaults=false;; - --he | --h) - # Conflict between --help and --header - as_fn_error $? "ambiguous option: \`$1' -Try \`$0 --help' for more information.";; - --help | --hel | -h ) - $as_echo "$ac_cs_usage"; exit ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil | --si | --s) - ac_cs_silent=: ;; - - # This is an error. - -*) as_fn_error $? "unrecognized option: \`$1' -Try \`$0 --help' for more information." 
;; - - *) as_fn_append ac_config_targets " $1" - ac_need_defaults=false ;; - - esac - shift -done - -ac_configure_extra_args= - -if $ac_cs_silent; then - exec 6>/dev/null - ac_configure_extra_args="$ac_configure_extra_args --silent" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -if \$ac_cs_recheck; then - set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion - shift - \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 - CONFIG_SHELL='$SHELL' - export CONFIG_SHELL - exec "\$@" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -exec 5>>config.log -{ - echo - sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX -## Running $as_me. ## -_ASBOX - $as_echo "$ac_log" -} >&5 - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -# -# INIT-COMMANDS -# -AMDEP_TRUE="$AMDEP_TRUE" MAKE="${MAKE-make}" - -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 - -# Handling of arguments. -for ac_config_target in $ac_config_targets -do - case $ac_config_target in - "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; - "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;; - "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; - "src/Makefile") CONFIG_FILES="$CONFIG_FILES src/Makefile" ;; - - *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; - esac -done - - -# If the user did not use the arguments to specify the items to instantiate, -# then the envvar interface is used. Set only those that are not. -# We use the long form for the default assignment because of an extremely -# bizarre bug on SunOS 4.1.3. -if $ac_need_defaults; then - test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files - test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers - test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands -fi - -# Have a temporary directory for convenience. Make it in the build tree -# simply because there is no reason against having it here, and in addition, -# creating and moving files from /tmp can sometimes cause problems. -# Hook for its removal unless debugging. -# Note that there is a small window in which the directory will not be cleaned: -# after its creation but before its name has been assigned to `$tmp'. -$debug || -{ - tmp= ac_tmp= - trap 'exit_status=$? - : "${ac_tmp:=$tmp}" - { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status -' 0 - trap 'as_fn_exit 1' 1 2 13 15 -} -# Create a (secure) tmp directory for tmp files. - -{ - tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && - test -d "$tmp" -} || -{ - tmp=./conf$$-$RANDOM - (umask 077 && mkdir "$tmp") -} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 -ac_tmp=$tmp - -# Set up the scripts for CONFIG_FILES section. -# No need to generate them if there are no CONFIG_FILES. -# This happens for instance with `./config.status config.h'. -if test -n "$CONFIG_FILES"; then - - -ac_cr=`echo X | tr X '\015'` -# On cygwin, bash can eat \r inside `` if the user requested igncr. -# But we know of no other shell where ac_cr would be empty at this -# point, so we can use a bashism as a fallback. 
-if test "x$ac_cr" = x; then - eval ac_cr=\$\'\\r\' -fi -ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` -if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then - ac_cs_awk_cr='\\r' -else - ac_cs_awk_cr=$ac_cr -fi - -echo 'BEGIN {' >"$ac_tmp/subs1.awk" && -_ACEOF - - -{ - echo "cat >conf$$subs.awk <<_ACEOF" && - echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && - echo "_ACEOF" -} >conf$$subs.sh || - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 -ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` -ac_delim='%!_!# ' -for ac_last_try in false false false false false :; do - . ./conf$$subs.sh || - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 - - ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` - if test $ac_delim_n = $ac_delim_num; then - break - elif $ac_last_try; then - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 - else - ac_delim="$ac_delim!$ac_delim _$ac_delim!! " - fi -done -rm -f conf$$subs.sh - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && -_ACEOF -sed -n ' -h -s/^/S["/; s/!.*/"]=/ -p -g -s/^[^!]*!// -:repl -t repl -s/'"$ac_delim"'$// -t delim -:nl -h -s/\(.\{148\}\)..*/\1/ -t more1 -s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ -p -n -b repl -:more1 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t nl -:delim -h -s/\(.\{148\}\)..*/\1/ -t more2 -s/["\\]/\\&/g; s/^/"/; s/$/"/ -p -b -:more2 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t delim -' >$CONFIG_STATUS || ac_write_fail=1 -rm -f conf$$subs.awk -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -_ACAWK -cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && - for (key in S) S_is_set[key] = 1 - FS = "" - -} -{ - line = $ 0 - nfields = split(line, field, "@") - substed = 0 - len = length(field[1]) - for (i = 2; i < nfields; i++) { - key = field[i] - keylen = length(key) - if (S_is_set[key]) { - value = S[key] - line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) - len += length(value) + length(field[++i]) - substed = 1 - } else - len += 1 + keylen - } - - print line -} - -_ACAWK -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then - sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" -else - cat -fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ - || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 -_ACEOF - -# VPATH may cause trouble with some makes, so we remove sole $(srcdir), -# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and -# trailing colons and then remove the whole line if VPATH becomes empty -# (actually we leave an empty line to preserve line numbers). -if test "x$srcdir" = x.; then - ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ -h -s/// -s/^/:/ -s/[ ]*$/:/ -s/:\$(srcdir):/:/g -s/:\${srcdir}:/:/g -s/:@srcdir@:/:/g -s/^:*// -s/:*$// -x -s/\(=[ ]*\).*/\1/ -G -s/\n// -s/^[^=]*=[ ]*$// -}' -fi - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -fi # test -n "$CONFIG_FILES" - -# Set up the scripts for CONFIG_HEADERS section. -# No need to generate them if there are no CONFIG_HEADERS. -# This happens for instance with `./config.status Makefile'. -if test -n "$CONFIG_HEADERS"; then -cat >"$ac_tmp/defines.awk" <<\_ACAWK || -BEGIN { -_ACEOF - -# Transform confdefs.h into an awk script `defines.awk', embedded as -# here-document in config.status, that substitutes the proper values into -# config.h.in to produce config.h. - -# Create a delimiter string that does not exist in confdefs.h, to ease -# handling of long lines. 
-ac_delim='%!_!# ' -for ac_last_try in false false :; do - ac_tt=`sed -n "/$ac_delim/p" confdefs.h` - if test -z "$ac_tt"; then - break - elif $ac_last_try; then - as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5 - else - ac_delim="$ac_delim!$ac_delim _$ac_delim!! " - fi -done - -# For the awk script, D is an array of macro values keyed by name, -# likewise P contains macro parameters if any. Preserve backslash -# newline sequences. - -ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* -sed -n ' -s/.\{148\}/&'"$ac_delim"'/g -t rset -:rset -s/^[ ]*#[ ]*define[ ][ ]*/ / -t def -d -:def -s/\\$// -t bsnl -s/["\\]/\\&/g -s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ -D["\1"]=" \3"/p -s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p -d -:bsnl -s/["\\]/\\&/g -s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ -D["\1"]=" \3\\\\\\n"\\/p -t cont -s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p -t cont -d -:cont -n -s/.\{148\}/&'"$ac_delim"'/g -t clear -:clear -s/\\$// -t bsnlc -s/["\\]/\\&/g; s/^/"/; s/$/"/p -d -:bsnlc -s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p -b cont -' >$CONFIG_STATUS || ac_write_fail=1 - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 - for (key in D) D_is_set[key] = 1 - FS = "" -} -/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { - line = \$ 0 - split(line, arg, " ") - if (arg[1] == "#") { - defundef = arg[2] - mac1 = arg[3] - } else { - defundef = substr(arg[1], 2) - mac1 = arg[2] - } - split(mac1, mac2, "(") #) - macro = mac2[1] - prefix = substr(line, 1, index(line, defundef) - 1) - if (D_is_set[macro]) { - # Preserve the white space surrounding the "#". - print prefix "define", macro P[macro] D[macro] - next - } else { - # Replace #undef with comments. This is necessary, for example, - # in the case of _POSIX_SOURCE, which is predefined and required - # on some systems where configure will not decide to define it. - if (defundef == "undef") { - print "/*", prefix defundef, macro, "*/" - next - } - } -} -{ print } -_ACAWK -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 - as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 -fi # test -n "$CONFIG_HEADERS" - - -eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS" -shift -for ac_tag -do - case $ac_tag in - :[FHLC]) ac_mode=$ac_tag; continue;; - esac - case $ac_mode$ac_tag in - :[FHL]*:*);; - :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;; - :[FH]-) ac_tag=-:-;; - :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; - esac - ac_save_IFS=$IFS - IFS=: - set x $ac_tag - IFS=$ac_save_IFS - shift - ac_file=$1 - shift - - case $ac_mode in - :L) ac_source=$1;; - :[FH]) - ac_file_inputs= - for ac_f - do - case $ac_f in - -) ac_f="$ac_tmp/stdin";; - *) # Look for the file first in the build tree, then in the source tree - # (if the path is not absolute). The absolute path cannot be DOS-style, - # because $ac_f cannot contain `:'. - test -f "$ac_f" || - case $ac_f in - [\\/$]*) false;; - *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; - esac || - as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; - esac - case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac - as_fn_append ac_file_inputs " '$ac_f'" - done - - # Let's still pretend it is `configure' which instantiates (i.e., don't - # use $as_me), people would be surprised to read: - # /* config.h. Generated by config.status. */ - configure_input='Generated from '` - $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' - `' by configure.' 
- if test x"$ac_file" != x-; then - configure_input="$ac_file. $configure_input" - { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 -$as_echo "$as_me: creating $ac_file" >&6;} - fi - # Neutralize special characters interpreted by sed in replacement strings. - case $configure_input in #( - *\&* | *\|* | *\\* ) - ac_sed_conf_input=`$as_echo "$configure_input" | - sed 's/[\\\\&|]/\\\\&/g'`;; #( - *) ac_sed_conf_input=$configure_input;; - esac - - case $ac_tag in - *:-:* | *:-) cat >"$ac_tmp/stdin" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; - esac - ;; - esac - - ac_dir=`$as_dirname -- "$ac_file" || -$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$ac_file" : 'X\(//\)[^/]' \| \ - X"$ac_file" : 'X\(//\)$' \| \ - X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$ac_file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - as_dir="$ac_dir"; as_fn_mkdir_p - ac_builddir=. - -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - - case $ac_mode in - :F) - # - # CONFIG_FILE - # - - case $INSTALL in - [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; - *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; - esac - ac_MKDIR_P=$MKDIR_P - case $MKDIR_P in - [\\/$]* | ?:[\\/]* ) ;; - */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; - esac -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# If the template does not know about datarootdir, expand it. -# FIXME: This hack should be removed a few years after 2.60. -ac_datarootdir_hack=; ac_datarootdir_seen= -ac_sed_dataroot=' -/datarootdir/ { - p - q -} -/@datadir@/p -/@docdir@/p -/@infodir@/p -/@localedir@/p -/@mandir@/p' -case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in -*datarootdir*) ac_datarootdir_seen=yes;; -*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 -$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 - ac_datarootdir_hack=' - s&@datadir@&$datadir&g - s&@docdir@&$docdir&g - s&@infodir@&$infodir&g - s&@localedir@&$localedir&g - s&@mandir@&$mandir&g - s&\\\${datarootdir}&$datarootdir&g' ;; -esac -_ACEOF - -# Neutralize VPATH when `$srcdir' = `.'. -# Shell code in configure.ac might set extrasub. -# FIXME: do we really want to maintain this feature? 
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_sed_extra="$ac_vpsub -$extrasub -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -:t -/@[a-zA-Z_][a-zA-Z_0-9]*@/!b -s|@configure_input@|$ac_sed_conf_input|;t t -s&@top_builddir@&$ac_top_builddir_sub&;t t -s&@top_build_prefix@&$ac_top_build_prefix&;t t -s&@srcdir@&$ac_srcdir&;t t -s&@abs_srcdir@&$ac_abs_srcdir&;t t -s&@top_srcdir@&$ac_top_srcdir&;t t -s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t -s&@builddir@&$ac_builddir&;t t -s&@abs_builddir@&$ac_abs_builddir&;t t -s&@abs_top_builddir@&$ac_abs_top_builddir&;t t -s&@INSTALL@&$ac_INSTALL&;t t -s&@MKDIR_P@&$ac_MKDIR_P&;t t -$ac_datarootdir_hack -" -eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ - >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - -test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && - { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && - { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ - "$ac_tmp/out"`; test -z "$ac_out"; } && - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&5 -$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&2;} - - rm -f "$ac_tmp/stdin" - case $ac_file in - -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; - *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; - esac \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - ;; - :H) - # - # CONFIG_HEADER - # - if test x"$ac_file" != x-; then - { - $as_echo "/* $configure_input */" \ - && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" - } >"$ac_tmp/config.h" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then - { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 -$as_echo "$as_me: $ac_file is unchanged" >&6;} - else - rm -f "$ac_file" - mv "$ac_tmp/config.h" "$ac_file" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - fi - else - $as_echo "/* $configure_input */" \ - && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ - || as_fn_error $? "could not create -" "$LINENO" 5 - fi -# Compute "$ac_file"'s index in $config_headers. -_am_arg="$ac_file" -_am_stamp_count=1 -for _am_header in $config_headers :; do - case $_am_header in - $_am_arg | $_am_arg:* ) - break ;; - * ) - _am_stamp_count=`expr $_am_stamp_count + 1` ;; - esac -done -echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" || -$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$_am_arg" : 'X\(//\)[^/]' \| \ - X"$_am_arg" : 'X\(//\)$' \| \ - X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$_am_arg" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'`/stamp-h$_am_stamp_count - ;; - - :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5 -$as_echo "$as_me: executing $ac_file commands" >&6;} - ;; - esac - - - case $ac_file$ac_mode in - "depfiles":C) test x"$AMDEP_TRUE" != x"" || { - # Older Autoconf quotes --file arguments for eval, but not when files - # are listed without --file. Let's play safe and only enable the eval - # if we detect the quoting. 
- # TODO: see whether this extra hack can be removed once we start - # requiring Autoconf 2.70 or later. - case $CONFIG_FILES in #( - *\'*) : - eval set x "$CONFIG_FILES" ;; #( - *) : - set x $CONFIG_FILES ;; #( - *) : - ;; -esac - shift - # Used to flag and report bootstrapping failures. - am_rc=0 - for am_mf - do - # Strip MF so we end up with the name of the file. - am_mf=`$as_echo "$am_mf" | sed -e 's/:.*$//'` - # Check whether this is an Automake generated Makefile which includes - # dependency-tracking related rules and includes. - # Grep'ing the whole file directly is not great: AIX grep has a line - # limit of 2048, but all sed's we know have understand at least 4000. - sed -n 's,^am--depfiles:.*,X,p' "$am_mf" | grep X >/dev/null 2>&1 \ - || continue - am_dirpart=`$as_dirname -- "$am_mf" || -$as_expr X"$am_mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$am_mf" : 'X\(//\)[^/]' \| \ - X"$am_mf" : 'X\(//\)$' \| \ - X"$am_mf" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$am_mf" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - am_filepart=`$as_basename -- "$am_mf" || -$as_expr X/"$am_mf" : '.*/\([^/][^/]*\)/*$' \| \ - X"$am_mf" : 'X\(//\)$' \| \ - X"$am_mf" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X/"$am_mf" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - { echo "$as_me:$LINENO: cd "$am_dirpart" \ - && sed -e '/# am--include-marker/d' "$am_filepart" \ - | $MAKE -f - am--depfiles" >&5 - (cd "$am_dirpart" \ - && sed -e '/# am--include-marker/d' "$am_filepart" \ - | $MAKE -f - am--depfiles) >&5 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } || am_rc=$? - done - if test $am_rc -ne 0; then - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "Something went wrong bootstrapping makefile fragments - for automatic dependency tracking. If GNU make was not used, consider - re-running the configure script with MAKE=\"gmake\" (or whatever is - necessary). You can also try re-running configure with the - '--disable-dependency-tracking' option to at least be able to build - the package (albeit without support for automatic dependency tracking). -See \`config.log' for more details" "$LINENO" 5; } - fi - { am_dirpart=; unset am_dirpart;} - { am_filepart=; unset am_filepart;} - { am_mf=; unset am_mf;} - { am_rc=; unset am_rc;} - rm -f conftest-deps.mk -} - ;; - - esac -done # for ac_tag - - -as_fn_exit 0 -_ACEOF -ac_clean_files=$ac_clean_files_save - -test $ac_write_fail = 0 || - as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 - - -# configure is writing to config.log, and then calls config.status. -# config.status does its own redirection, appending to config.log. -# Unfortunately, on DOS this fails, as config.log is still kept open -# by configure, so config.status won't be able to write to it; its -# output is simply discarded. So we exec the FD to /dev/null, -# effectively closing config.log, so it can be properly (re)opened and -# appended to by config.status. When coming back to configure, we -# need to make the FD available again. 
-if test "$no_create" != yes; then - ac_cs_success=: - ac_config_status_args= - test "$silent" = yes && - ac_config_status_args="$ac_config_status_args --quiet" - exec 5>/dev/null - $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false - exec 5>>config.log - # Use ||, not &&, to avoid exiting from the if with $? = 1, which - # would make configure fail if this is the last instruction. - $ac_cs_success || as_fn_exit 1 -fi -if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 -$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} -fi - diff --git a/c/consumer-verification/configure.ac b/c/consumer-verification/configure.ac deleted file mode 100644 index cb88531b8..000000000 --- a/c/consumer-verification/configure.ac +++ /dev/null @@ -1,11 +0,0 @@ -AC_INIT([consumer-verification], [0.0.0], [bug-automake@gnu.org]) -AM_INIT_AUTOMAKE([-Wall -Werror foreign]) -m4_include([libcurl.m4]) -AC_PROG_CC -LIBCURL_CHECK_CONFIG([yes],[],[],[AC_MSG_ERROR([libcurl development files required])]) -AC_CONFIG_HEADERS([config.h]) -AC_CONFIG_FILES([ - Makefile - src/Makefile -]) -AC_OUTPUT diff --git a/c/consumer-verification/depcomp b/c/consumer-verification/depcomp deleted file mode 100755 index fc98710e2..000000000 --- a/c/consumer-verification/depcomp +++ /dev/null @@ -1,791 +0,0 @@ -#! /bin/sh -# depcomp - compile a program generating dependencies as side-effects - -scriptversion=2013-05-30.07; # UTC - -# Copyright (C) 1999-2014 Free Software Foundation, Inc. - -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2, or (at your option) -# any later version. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. - -# Originally written by Alexandre Oliva . - -case $1 in - '') - echo "$0: No command. Try '$0 --help' for more information." 1>&2 - exit 1; - ;; - -h | --h*) - cat <<\EOF -Usage: depcomp [--help] [--version] PROGRAM [ARGS] - -Run PROGRAMS ARGS to compile a file, generating dependencies -as side-effects. - -Environment variables: - depmode Dependency tracking mode. - source Source file read by 'PROGRAMS ARGS'. - object Object file output by 'PROGRAMS ARGS'. - DEPDIR directory where to store dependencies. - depfile Dependency file to output. - tmpdepfile Temporary file to use when outputting dependencies. - libtool Whether libtool is used (yes/no). - -Report bugs to . -EOF - exit $? - ;; - -v | --v*) - echo "depcomp $scriptversion" - exit $? - ;; -esac - -# Get the directory component of the given path, and save it in the -# global variables '$dir'. Note that this directory component will -# be either empty or ending with a '/' character. This is deliberate. 
-set_dir_from () -{ - case $1 in - */*) dir=`echo "$1" | sed -e 's|/[^/]*$|/|'`;; - *) dir=;; - esac -} - -# Get the suffix-stripped basename of the given path, and save it the -# global variable '$base'. -set_base_from () -{ - base=`echo "$1" | sed -e 's|^.*/||' -e 's/\.[^.]*$//'` -} - -# If no dependency file was actually created by the compiler invocation, -# we still have to create a dummy depfile, to avoid errors with the -# Makefile "include basename.Plo" scheme. -make_dummy_depfile () -{ - echo "#dummy" > "$depfile" -} - -# Factor out some common post-processing of the generated depfile. -# Requires the auxiliary global variable '$tmpdepfile' to be set. -aix_post_process_depfile () -{ - # If the compiler actually managed to produce a dependency file, - # post-process it. - if test -f "$tmpdepfile"; then - # Each line is of the form 'foo.o: dependency.h'. - # Do two passes, one to just change these to - # $object: dependency.h - # and one to simply output - # dependency.h: - # which is needed to avoid the deleted-header problem. - { sed -e "s,^.*\.[$lower]*:,$object:," < "$tmpdepfile" - sed -e "s,^.*\.[$lower]*:[$tab ]*,," -e 's,$,:,' < "$tmpdepfile" - } > "$depfile" - rm -f "$tmpdepfile" - else - make_dummy_depfile - fi -} - -# A tabulation character. -tab=' ' -# A newline character. -nl=' -' -# Character ranges might be problematic outside the C locale. -# These definitions help. -upper=ABCDEFGHIJKLMNOPQRSTUVWXYZ -lower=abcdefghijklmnopqrstuvwxyz -digits=0123456789 -alpha=${upper}${lower} - -if test -z "$depmode" || test -z "$source" || test -z "$object"; then - echo "depcomp: Variables source, object and depmode must be set" 1>&2 - exit 1 -fi - -# Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po. -depfile=${depfile-`echo "$object" | - sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`} -tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`} - -rm -f "$tmpdepfile" - -# Avoid interferences from the environment. -gccflag= dashmflag= - -# Some modes work just like other modes, but use different flags. We -# parameterize here, but still list the modes in the big case below, -# to make depend.m4 easier to write. Note that we *cannot* use a case -# here, because this file can only contain one case statement. -if test "$depmode" = hp; then - # HP compiler uses -M and no extra arg. - gccflag=-M - depmode=gcc -fi - -if test "$depmode" = dashXmstdout; then - # This is just like dashmstdout with a different argument. - dashmflag=-xM - depmode=dashmstdout -fi - -cygpath_u="cygpath -u -f -" -if test "$depmode" = msvcmsys; then - # This is just like msvisualcpp but w/o cygpath translation. - # Just convert the backslash-escaped backslashes to single forward - # slashes to satisfy depend.m4 - cygpath_u='sed s,\\\\,/,g' - depmode=msvisualcpp -fi - -if test "$depmode" = msvc7msys; then - # This is just like msvc7 but w/o cygpath translation. - # Just convert the backslash-escaped backslashes to single forward - # slashes to satisfy depend.m4 - cygpath_u='sed s,\\\\,/,g' - depmode=msvc7 -fi - -if test "$depmode" = xlc; then - # IBM C/C++ Compilers xlc/xlC can output gcc-like dependency information. - gccflag=-qmakedep=gcc,-MF - depmode=gcc -fi - -case "$depmode" in -gcc3) -## gcc 3 implements dependency tracking that does exactly what -## we want. Yay! Note: for some reason libtool 1.4 doesn't like -## it if -MD -MP comes after the -MF stuff. Hmm. 
-## Unfortunately, FreeBSD c89 acceptance of flags depends upon -## the command line argument order; so add the flags where they -## appear in depend2.am. Note that the slowdown incurred here -## affects only configure: in makefiles, %FASTDEP% shortcuts this. - for arg - do - case $arg in - -c) set fnord "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" "$arg" ;; - *) set fnord "$@" "$arg" ;; - esac - shift # fnord - shift # $arg - done - "$@" - stat=$? - if test $stat -ne 0; then - rm -f "$tmpdepfile" - exit $stat - fi - mv "$tmpdepfile" "$depfile" - ;; - -gcc) -## Note that this doesn't just cater to obsosete pre-3.x GCC compilers. -## but also to in-use compilers like IMB xlc/xlC and the HP C compiler. -## (see the conditional assignment to $gccflag above). -## There are various ways to get dependency output from gcc. Here's -## why we pick this rather obscure method: -## - Don't want to use -MD because we'd like the dependencies to end -## up in a subdir. Having to rename by hand is ugly. -## (We might end up doing this anyway to support other compilers.) -## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like -## -MM, not -M (despite what the docs say). Also, it might not be -## supported by the other compilers which use the 'gcc' depmode. -## - Using -M directly means running the compiler twice (even worse -## than renaming). - if test -z "$gccflag"; then - gccflag=-MD, - fi - "$@" -Wp,"$gccflag$tmpdepfile" - stat=$? - if test $stat -ne 0; then - rm -f "$tmpdepfile" - exit $stat - fi - rm -f "$depfile" - echo "$object : \\" > "$depfile" - # The second -e expression handles DOS-style file names with drive - # letters. - sed -e 's/^[^:]*: / /' \ - -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile" -## This next piece of magic avoids the "deleted header file" problem. -## The problem is that when a header file which appears in a .P file -## is deleted, the dependency causes make to die (because there is -## typically no way to rebuild the header). We avoid this by adding -## dummy dependencies for each header file. Too bad gcc doesn't do -## this for us directly. -## Some versions of gcc put a space before the ':'. On the theory -## that the space means something, we add a space to the output as -## well. hp depmode also adds that space, but also prefixes the VPATH -## to the object. Take care to not repeat it in the output. -## Some versions of the HPUX 10.20 sed can't process this invocation -## correctly. Breaking it into two sed invocations is a workaround. - tr ' ' "$nl" < "$tmpdepfile" \ - | sed -e 's/^\\$//' -e '/^$/d' -e "s|.*$object$||" -e '/:$/d' \ - | sed -e 's/$/ :/' >> "$depfile" - rm -f "$tmpdepfile" - ;; - -hp) - # This case exists only to let depend.m4 do its work. It works by - # looking at the text of this script. This case will never be run, - # since it is checked for above. - exit 1 - ;; - -sgi) - if test "$libtool" = yes; then - "$@" "-Wp,-MDupdate,$tmpdepfile" - else - "$@" -MDupdate "$tmpdepfile" - fi - stat=$? - if test $stat -ne 0; then - rm -f "$tmpdepfile" - exit $stat - fi - rm -f "$depfile" - - if test -f "$tmpdepfile"; then # yes, the sourcefile depend on other files - echo "$object : \\" > "$depfile" - # Clip off the initial element (the dependent). Don't try to be - # clever and replace this with sed code, as IRIX sed won't handle - # lines with more than a fixed number of characters (4096 in - # IRIX 6.2 sed, 8192 in IRIX 6.5). 
We also remove comment lines; - # the IRIX cc adds comments like '#:fec' to the end of the - # dependency line. - tr ' ' "$nl" < "$tmpdepfile" \ - | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' \ - | tr "$nl" ' ' >> "$depfile" - echo >> "$depfile" - # The second pass generates a dummy entry for each header file. - tr ' ' "$nl" < "$tmpdepfile" \ - | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \ - >> "$depfile" - else - make_dummy_depfile - fi - rm -f "$tmpdepfile" - ;; - -xlc) - # This case exists only to let depend.m4 do its work. It works by - # looking at the text of this script. This case will never be run, - # since it is checked for above. - exit 1 - ;; - -aix) - # The C for AIX Compiler uses -M and outputs the dependencies - # in a .u file. In older versions, this file always lives in the - # current directory. Also, the AIX compiler puts '$object:' at the - # start of each line; $object doesn't have directory information. - # Version 6 uses the directory in both cases. - set_dir_from "$object" - set_base_from "$object" - if test "$libtool" = yes; then - tmpdepfile1=$dir$base.u - tmpdepfile2=$base.u - tmpdepfile3=$dir.libs/$base.u - "$@" -Wc,-M - else - tmpdepfile1=$dir$base.u - tmpdepfile2=$dir$base.u - tmpdepfile3=$dir$base.u - "$@" -M - fi - stat=$? - if test $stat -ne 0; then - rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" - exit $stat - fi - - for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" - do - test -f "$tmpdepfile" && break - done - aix_post_process_depfile - ;; - -tcc) - # tcc (Tiny C Compiler) understand '-MD -MF file' since version 0.9.26 - # FIXME: That version still under development at the moment of writing. - # Make that this statement remains true also for stable, released - # versions. - # It will wrap lines (doesn't matter whether long or short) with a - # trailing '\', as in: - # - # foo.o : \ - # foo.c \ - # foo.h \ - # - # It will put a trailing '\' even on the last line, and will use leading - # spaces rather than leading tabs (at least since its commit 0394caf7 - # "Emit spaces for -MD"). - "$@" -MD -MF "$tmpdepfile" - stat=$? - if test $stat -ne 0; then - rm -f "$tmpdepfile" - exit $stat - fi - rm -f "$depfile" - # Each non-empty line is of the form 'foo.o : \' or ' dep.h \'. - # We have to change lines of the first kind to '$object: \'. - sed -e "s|.*:|$object :|" < "$tmpdepfile" > "$depfile" - # And for each line of the second kind, we have to emit a 'dep.h:' - # dummy dependency, to avoid the deleted-header problem. - sed -n -e 's|^ *\(.*\) *\\$|\1:|p' < "$tmpdepfile" >> "$depfile" - rm -f "$tmpdepfile" - ;; - -## The order of this option in the case statement is important, since the -## shell code in configure will try each of these formats in the order -## listed in this file. A plain '-MD' option would be understood by many -## compilers, so we must ensure this comes after the gcc and icc options. -pgcc) - # Portland's C compiler understands '-MD'. - # Will always output deps to 'file.d' where file is the root name of the - # source file under compilation, even if file resides in a subdirectory. - # The object file name does not affect the name of the '.d' file. - # pgcc 10.2 will output - # foo.o: sub/foo.c sub/foo.h - # and will wrap long lines using '\' : - # foo.o: sub/foo.c ... \ - # sub/foo.h ... \ - # ... - set_dir_from "$object" - # Use the source, not the object, to determine the base name, since - # that's sadly what pgcc will do too. 
- set_base_from "$source" - tmpdepfile=$base.d - - # For projects that build the same source file twice into different object - # files, the pgcc approach of using the *source* file root name can cause - # problems in parallel builds. Use a locking strategy to avoid stomping on - # the same $tmpdepfile. - lockdir=$base.d-lock - trap " - echo '$0: caught signal, cleaning up...' >&2 - rmdir '$lockdir' - exit 1 - " 1 2 13 15 - numtries=100 - i=$numtries - while test $i -gt 0; do - # mkdir is a portable test-and-set. - if mkdir "$lockdir" 2>/dev/null; then - # This process acquired the lock. - "$@" -MD - stat=$? - # Release the lock. - rmdir "$lockdir" - break - else - # If the lock is being held by a different process, wait - # until the winning process is done or we timeout. - while test -d "$lockdir" && test $i -gt 0; do - sleep 1 - i=`expr $i - 1` - done - fi - i=`expr $i - 1` - done - trap - 1 2 13 15 - if test $i -le 0; then - echo "$0: failed to acquire lock after $numtries attempts" >&2 - echo "$0: check lockdir '$lockdir'" >&2 - exit 1 - fi - - if test $stat -ne 0; then - rm -f "$tmpdepfile" - exit $stat - fi - rm -f "$depfile" - # Each line is of the form `foo.o: dependent.h', - # or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'. - # Do two passes, one to just change these to - # `$object: dependent.h' and one to simply `dependent.h:'. - sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile" - # Some versions of the HPUX 10.20 sed can't process this invocation - # correctly. Breaking it into two sed invocations is a workaround. - sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" \ - | sed -e 's/$/ :/' >> "$depfile" - rm -f "$tmpdepfile" - ;; - -hp2) - # The "hp" stanza above does not work with aCC (C++) and HP's ia64 - # compilers, which have integrated preprocessors. The correct option - # to use with these is +Maked; it writes dependencies to a file named - # 'foo.d', which lands next to the object file, wherever that - # happens to be. - # Much of this is similar to the tru64 case; see comments there. - set_dir_from "$object" - set_base_from "$object" - if test "$libtool" = yes; then - tmpdepfile1=$dir$base.d - tmpdepfile2=$dir.libs/$base.d - "$@" -Wc,+Maked - else - tmpdepfile1=$dir$base.d - tmpdepfile2=$dir$base.d - "$@" +Maked - fi - stat=$? - if test $stat -ne 0; then - rm -f "$tmpdepfile1" "$tmpdepfile2" - exit $stat - fi - - for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" - do - test -f "$tmpdepfile" && break - done - if test -f "$tmpdepfile"; then - sed -e "s,^.*\.[$lower]*:,$object:," "$tmpdepfile" > "$depfile" - # Add 'dependent.h:' lines. - sed -ne '2,${ - s/^ *// - s/ \\*$// - s/$/:/ - p - }' "$tmpdepfile" >> "$depfile" - else - make_dummy_depfile - fi - rm -f "$tmpdepfile" "$tmpdepfile2" - ;; - -tru64) - # The Tru64 compiler uses -MD to generate dependencies as a side - # effect. 'cc -MD -o foo.o ...' puts the dependencies into 'foo.o.d'. - # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put - # dependencies in 'foo.d' instead, so we check for that too. - # Subdirectories are respected. - set_dir_from "$object" - set_base_from "$object" - - if test "$libtool" = yes; then - # Libtool generates 2 separate objects for the 2 libraries. These - # two compilations output dependencies in $dir.libs/$base.o.d and - # in $dir$base.o.d. We have to check for both files, because - # one of the two compilations can be disabled. 
We should prefer - # $dir$base.o.d over $dir.libs/$base.o.d because the latter is - # automatically cleaned when .libs/ is deleted, while ignoring - # the former would cause a distcleancheck panic. - tmpdepfile1=$dir$base.o.d # libtool 1.5 - tmpdepfile2=$dir.libs/$base.o.d # Likewise. - tmpdepfile3=$dir.libs/$base.d # Compaq CCC V6.2-504 - "$@" -Wc,-MD - else - tmpdepfile1=$dir$base.d - tmpdepfile2=$dir$base.d - tmpdepfile3=$dir$base.d - "$@" -MD - fi - - stat=$? - if test $stat -ne 0; then - rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" - exit $stat - fi - - for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" - do - test -f "$tmpdepfile" && break - done - # Same post-processing that is required for AIX mode. - aix_post_process_depfile - ;; - -msvc7) - if test "$libtool" = yes; then - showIncludes=-Wc,-showIncludes - else - showIncludes=-showIncludes - fi - "$@" $showIncludes > "$tmpdepfile" - stat=$? - grep -v '^Note: including file: ' "$tmpdepfile" - if test $stat -ne 0; then - rm -f "$tmpdepfile" - exit $stat - fi - rm -f "$depfile" - echo "$object : \\" > "$depfile" - # The first sed program below extracts the file names and escapes - # backslashes for cygpath. The second sed program outputs the file - # name when reading, but also accumulates all include files in the - # hold buffer in order to output them again at the end. This only - # works with sed implementations that can handle large buffers. - sed < "$tmpdepfile" -n ' -/^Note: including file: *\(.*\)/ { - s//\1/ - s/\\/\\\\/g - p -}' | $cygpath_u | sort -u | sed -n ' -s/ /\\ /g -s/\(.*\)/'"$tab"'\1 \\/p -s/.\(.*\) \\/\1:/ -H -$ { - s/.*/'"$tab"'/ - G - p -}' >> "$depfile" - echo >> "$depfile" # make sure the fragment doesn't end with a backslash - rm -f "$tmpdepfile" - ;; - -msvc7msys) - # This case exists only to let depend.m4 do its work. It works by - # looking at the text of this script. This case will never be run, - # since it is checked for above. - exit 1 - ;; - -#nosideeffect) - # This comment above is used by automake to tell side-effect - # dependency tracking mechanisms from slower ones. - -dashmstdout) - # Important note: in order to support this mode, a compiler *must* - # always write the preprocessed file to stdout, regardless of -o. - "$@" || exit $? - - # Remove the call to Libtool. - if test "$libtool" = yes; then - while test "X$1" != 'X--mode=compile'; do - shift - done - shift - fi - - # Remove '-o $object'. - IFS=" " - for arg - do - case $arg in - -o) - shift - ;; - $object) - shift - ;; - *) - set fnord "$@" "$arg" - shift # fnord - shift # $arg - ;; - esac - done - - test -z "$dashmflag" && dashmflag=-M - # Require at least two characters before searching for ':' - # in the target name. This is to cope with DOS-style filenames: - # a dependency such as 'c:/foo/bar' could be seen as target 'c' otherwise. - "$@" $dashmflag | - sed "s|^[$tab ]*[^:$tab ][^:][^:]*:[$tab ]*|$object: |" > "$tmpdepfile" - rm -f "$depfile" - cat < "$tmpdepfile" > "$depfile" - # Some versions of the HPUX 10.20 sed can't process this sed invocation - # correctly. Breaking it into two sed invocations is a workaround. - tr ' ' "$nl" < "$tmpdepfile" \ - | sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \ - | sed -e 's/$/ :/' >> "$depfile" - rm -f "$tmpdepfile" - ;; - -dashXmstdout) - # This case only exists to satisfy depend.m4. It is never actually - # run, as this mode is specially recognized in the preamble. - exit 1 - ;; - -makedepend) - "$@" || exit $? 
- # Remove any Libtool call - if test "$libtool" = yes; then - while test "X$1" != 'X--mode=compile'; do - shift - done - shift - fi - # X makedepend - shift - cleared=no eat=no - for arg - do - case $cleared in - no) - set ""; shift - cleared=yes ;; - esac - if test $eat = yes; then - eat=no - continue - fi - case "$arg" in - -D*|-I*) - set fnord "$@" "$arg"; shift ;; - # Strip any option that makedepend may not understand. Remove - # the object too, otherwise makedepend will parse it as a source file. - -arch) - eat=yes ;; - -*|$object) - ;; - *) - set fnord "$@" "$arg"; shift ;; - esac - done - obj_suffix=`echo "$object" | sed 's/^.*\././'` - touch "$tmpdepfile" - ${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@" - rm -f "$depfile" - # makedepend may prepend the VPATH from the source file name to the object. - # No need to regex-escape $object, excess matching of '.' is harmless. - sed "s|^.*\($object *:\)|\1|" "$tmpdepfile" > "$depfile" - # Some versions of the HPUX 10.20 sed can't process the last invocation - # correctly. Breaking it into two sed invocations is a workaround. - sed '1,2d' "$tmpdepfile" \ - | tr ' ' "$nl" \ - | sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \ - | sed -e 's/$/ :/' >> "$depfile" - rm -f "$tmpdepfile" "$tmpdepfile".bak - ;; - -cpp) - # Important note: in order to support this mode, a compiler *must* - # always write the preprocessed file to stdout. - "$@" || exit $? - - # Remove the call to Libtool. - if test "$libtool" = yes; then - while test "X$1" != 'X--mode=compile'; do - shift - done - shift - fi - - # Remove '-o $object'. - IFS=" " - for arg - do - case $arg in - -o) - shift - ;; - $object) - shift - ;; - *) - set fnord "$@" "$arg" - shift # fnord - shift # $arg - ;; - esac - done - - "$@" -E \ - | sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \ - -e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \ - | sed '$ s: \\$::' > "$tmpdepfile" - rm -f "$depfile" - echo "$object : \\" > "$depfile" - cat < "$tmpdepfile" >> "$depfile" - sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile" - rm -f "$tmpdepfile" - ;; - -msvisualcpp) - # Important note: in order to support this mode, a compiler *must* - # always write the preprocessed file to stdout. - "$@" || exit $? - - # Remove the call to Libtool. - if test "$libtool" = yes; then - while test "X$1" != 'X--mode=compile'; do - shift - done - shift - fi - - IFS=" " - for arg - do - case "$arg" in - -o) - shift - ;; - $object) - shift - ;; - "-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI") - set fnord "$@" - shift - shift - ;; - *) - set fnord "$@" "$arg" - shift - shift - ;; - esac - done - "$@" -E 2>/dev/null | - sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile" - rm -f "$depfile" - echo "$object : \\" > "$depfile" - sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::'"$tab"'\1 \\:p' >> "$depfile" - echo "$tab" >> "$depfile" - sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile" - rm -f "$tmpdepfile" - ;; - -msvcmsys) - # This case exists only to let depend.m4 do its work. It works by - # looking at the text of this script. This case will never be run, - # since it is checked for above. 
- exit 1 - ;; - -none) - exec "$@" - ;; - -*) - echo "Unknown depmode $depmode" 1>&2 - exit 1 - ;; -esac - -exit 0 - -# Local Variables: -# mode: shell-script -# sh-indentation: 2 -# eval: (add-hook 'write-file-hooks 'time-stamp) -# time-stamp-start: "scriptversion=" -# time-stamp-format: "%:y-%02m-%02d.%02H" -# time-stamp-time-zone: "UTC" -# time-stamp-end: "; # UTC" -# End: diff --git a/c/consumer-verification/install-sh b/c/consumer-verification/install-sh deleted file mode 100755 index 59990a104..000000000 --- a/c/consumer-verification/install-sh +++ /dev/null @@ -1,508 +0,0 @@ -#!/bin/sh -# install - install a program, script, or datafile - -scriptversion=2014-09-12.12; # UTC - -# This originates from X11R5 (mit/util/scripts/install.sh), which was -# later released in X11R6 (xc/config/util/install.sh) with the -# following copyright and license. -# -# Copyright (C) 1994 X Consortium -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN -# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- -# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -# Except as contained in this notice, the name of the X Consortium shall not -# be used in advertising or otherwise to promote the sale, use or other deal- -# ings in this Software without prior written authorization from the X Consor- -# tium. -# -# -# FSF changes to this file are in the public domain. -# -# Calling this script install-sh is preferred over install.sh, to prevent -# 'make' implicit rules from creating a file called install from it -# when there is no Makefile. -# -# This script is compatible with the BSD install script, but was written -# from scratch. - -tab=' ' -nl=' -' -IFS=" $tab$nl" - -# Set DOITPROG to "echo" to test this script. - -doit=${DOITPROG-} -doit_exec=${doit:-exec} - -# Put in absolute file names if you don't have them in your path; -# or use environment vars. - -chgrpprog=${CHGRPPROG-chgrp} -chmodprog=${CHMODPROG-chmod} -chownprog=${CHOWNPROG-chown} -cmpprog=${CMPPROG-cmp} -cpprog=${CPPROG-cp} -mkdirprog=${MKDIRPROG-mkdir} -mvprog=${MVPROG-mv} -rmprog=${RMPROG-rm} -stripprog=${STRIPPROG-strip} - -posix_mkdir= - -# Desired mode of installed file. -mode=0755 - -chgrpcmd= -chmodcmd=$chmodprog -chowncmd= -mvcmd=$mvprog -rmcmd="$rmprog -f" -stripcmd= - -src= -dst= -dir_arg= -dst_arg= - -copy_on_change=false -is_target_a_directory=possibly - -usage="\ -Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE - or: $0 [OPTION]... SRCFILES... DIRECTORY - or: $0 [OPTION]... -t DIRECTORY SRCFILES... - or: $0 [OPTION]... -d DIRECTORIES... - -In the 1st form, copy SRCFILE to DSTFILE. -In the 2nd and 3rd, copy all SRCFILES to DIRECTORY. 
-In the 4th, create DIRECTORIES. - -Options: - --help display this help and exit. - --version display version info and exit. - - -c (ignored) - -C install only if different (preserve the last data modification time) - -d create directories instead of installing files. - -g GROUP $chgrpprog installed files to GROUP. - -m MODE $chmodprog installed files to MODE. - -o USER $chownprog installed files to USER. - -s $stripprog installed files. - -t DIRECTORY install into DIRECTORY. - -T report an error if DSTFILE is a directory. - -Environment variables override the default commands: - CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG - RMPROG STRIPPROG -" - -while test $# -ne 0; do - case $1 in - -c) ;; - - -C) copy_on_change=true;; - - -d) dir_arg=true;; - - -g) chgrpcmd="$chgrpprog $2" - shift;; - - --help) echo "$usage"; exit $?;; - - -m) mode=$2 - case $mode in - *' '* | *"$tab"* | *"$nl"* | *'*'* | *'?'* | *'['*) - echo "$0: invalid mode: $mode" >&2 - exit 1;; - esac - shift;; - - -o) chowncmd="$chownprog $2" - shift;; - - -s) stripcmd=$stripprog;; - - -t) - is_target_a_directory=always - dst_arg=$2 - # Protect names problematic for 'test' and other utilities. - case $dst_arg in - -* | [=\(\)!]) dst_arg=./$dst_arg;; - esac - shift;; - - -T) is_target_a_directory=never;; - - --version) echo "$0 $scriptversion"; exit $?;; - - --) shift - break;; - - -*) echo "$0: invalid option: $1" >&2 - exit 1;; - - *) break;; - esac - shift -done - -# We allow the use of options -d and -T together, by making -d -# take the precedence; this is for compatibility with GNU install. - -if test -n "$dir_arg"; then - if test -n "$dst_arg"; then - echo "$0: target directory not allowed when installing a directory." >&2 - exit 1 - fi -fi - -if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then - # When -d is used, all remaining arguments are directories to create. - # When -t is used, the destination is already specified. - # Otherwise, the last argument is the destination. Remove it from $@. - for arg - do - if test -n "$dst_arg"; then - # $@ is not empty: it contains at least $arg. - set fnord "$@" "$dst_arg" - shift # fnord - fi - shift # arg - dst_arg=$arg - # Protect names problematic for 'test' and other utilities. - case $dst_arg in - -* | [=\(\)!]) dst_arg=./$dst_arg;; - esac - done -fi - -if test $# -eq 0; then - if test -z "$dir_arg"; then - echo "$0: no input file specified." >&2 - exit 1 - fi - # It's OK to call 'install-sh -d' without argument. - # This can happen when creating conditional directories. - exit 0 -fi - -if test -z "$dir_arg"; then - if test $# -gt 1 || test "$is_target_a_directory" = always; then - if test ! -d "$dst_arg"; then - echo "$0: $dst_arg: Is not a directory." >&2 - exit 1 - fi - fi -fi - -if test -z "$dir_arg"; then - do_exit='(exit $ret); exit $ret' - trap "ret=129; $do_exit" 1 - trap "ret=130; $do_exit" 2 - trap "ret=141; $do_exit" 13 - trap "ret=143; $do_exit" 15 - - # Set umask so as not to create temps with too-generous modes. - # However, 'strip' requires both read and write access to temps. - case $mode in - # Optimize common cases. - *644) cp_umask=133;; - *755) cp_umask=22;; - - *[0-7]) - if test -z "$stripcmd"; then - u_plus_rw= - else - u_plus_rw='% 200' - fi - cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;; - *) - if test -z "$stripcmd"; then - u_plus_rw= - else - u_plus_rw=,u+rw - fi - cp_umask=$mode$u_plus_rw;; - esac -fi - -for src -do - # Protect names problematic for 'test' and other utilities. 
- case $src in - -* | [=\(\)!]) src=./$src;; - esac - - if test -n "$dir_arg"; then - dst=$src - dstdir=$dst - test -d "$dstdir" - dstdir_status=$? - else - - # Waiting for this to be detected by the "$cpprog $src $dsttmp" command - # might cause directories to be created, which would be especially bad - # if $src (and thus $dsttmp) contains '*'. - if test ! -f "$src" && test ! -d "$src"; then - echo "$0: $src does not exist." >&2 - exit 1 - fi - - if test -z "$dst_arg"; then - echo "$0: no destination specified." >&2 - exit 1 - fi - dst=$dst_arg - - # If destination is a directory, append the input filename; won't work - # if double slashes aren't ignored. - if test -d "$dst"; then - if test "$is_target_a_directory" = never; then - echo "$0: $dst_arg: Is a directory" >&2 - exit 1 - fi - dstdir=$dst - dst=$dstdir/`basename "$src"` - dstdir_status=0 - else - dstdir=`dirname "$dst"` - test -d "$dstdir" - dstdir_status=$? - fi - fi - - obsolete_mkdir_used=false - - if test $dstdir_status != 0; then - case $posix_mkdir in - '') - # Create intermediate dirs using mode 755 as modified by the umask. - # This is like FreeBSD 'install' as of 1997-10-28. - umask=`umask` - case $stripcmd.$umask in - # Optimize common cases. - *[2367][2367]) mkdir_umask=$umask;; - .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;; - - *[0-7]) - mkdir_umask=`expr $umask + 22 \ - - $umask % 100 % 40 + $umask % 20 \ - - $umask % 10 % 4 + $umask % 2 - `;; - *) mkdir_umask=$umask,go-w;; - esac - - # With -d, create the new directory with the user-specified mode. - # Otherwise, rely on $mkdir_umask. - if test -n "$dir_arg"; then - mkdir_mode=-m$mode - else - mkdir_mode= - fi - - posix_mkdir=false - case $umask in - *[123567][0-7][0-7]) - # POSIX mkdir -p sets u+wx bits regardless of umask, which - # is incompatible with FreeBSD 'install' when (umask & 300) != 0. - ;; - *) - # $RANDOM is not portable (e.g. dash); use it when possible to - # lower collision chance - tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$ - trap 'ret=$?; rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir" 2>/dev/null; exit $ret' 0 - - # As "mkdir -p" follows symlinks and we work in /tmp possibly; so - # create the $tmpdir first (and fail if unsuccessful) to make sure - # that nobody tries to guess the $tmpdir name. - if (umask $mkdir_umask && - $mkdirprog $mkdir_mode "$tmpdir" && - exec $mkdirprog $mkdir_mode -p -- "$tmpdir/a/b") >/dev/null 2>&1 - then - if test -z "$dir_arg" || { - # Check for POSIX incompatibilities with -m. - # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or - # other-writable bit of parent directory when it shouldn't. - # FreeBSD 6.1 mkdir -m -p sets mode of existing directory. - test_tmpdir="$tmpdir/a" - ls_ld_tmpdir=`ls -ld "$test_tmpdir"` - case $ls_ld_tmpdir in - d????-?r-*) different_mode=700;; - d????-?--*) different_mode=755;; - *) false;; - esac && - $mkdirprog -m$different_mode -p -- "$test_tmpdir" && { - ls_ld_tmpdir_1=`ls -ld "$test_tmpdir"` - test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1" - } - } - then posix_mkdir=: - fi - rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir" - else - # Remove any dirs left behind by ancient mkdir implementations. - rmdir ./$mkdir_mode ./-p ./-- "$tmpdir" 2>/dev/null - fi - trap '' 0;; - esac;; - esac - - if - $posix_mkdir && ( - umask $mkdir_umask && - $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir" - ) - then : - else - - # The umask is ridiculous, or mkdir does not conform to POSIX, - # or it failed possibly due to a race condition. Create the - # directory the slow way, step by step, checking for races as we go. 
- - case $dstdir in - /*) prefix='/';; - [-=\(\)!]*) prefix='./';; - *) prefix='';; - esac - - oIFS=$IFS - IFS=/ - set -f - set fnord $dstdir - shift - set +f - IFS=$oIFS - - prefixes= - - for d - do - test X"$d" = X && continue - - prefix=$prefix$d - if test -d "$prefix"; then - prefixes= - else - if $posix_mkdir; then - (umask=$mkdir_umask && - $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break - # Don't fail if two instances are running concurrently. - test -d "$prefix" || exit 1 - else - case $prefix in - *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;; - *) qprefix=$prefix;; - esac - prefixes="$prefixes '$qprefix'" - fi - fi - prefix=$prefix/ - done - - if test -n "$prefixes"; then - # Don't fail if two instances are running concurrently. - (umask $mkdir_umask && - eval "\$doit_exec \$mkdirprog $prefixes") || - test -d "$dstdir" || exit 1 - obsolete_mkdir_used=true - fi - fi - fi - - if test -n "$dir_arg"; then - { test -z "$chowncmd" || $doit $chowncmd "$dst"; } && - { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } && - { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false || - test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1 - else - - # Make a couple of temp file names in the proper directory. - dsttmp=$dstdir/_inst.$$_ - rmtmp=$dstdir/_rm.$$_ - - # Trap to clean up those temp files at exit. - trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0 - - # Copy the file name to the temp name. - (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") && - - # and set any options; do chmod last to preserve setuid bits. - # - # If any of these fail, we abort the whole thing. If we want to - # ignore errors from any of these, just make sure not to ignore - # errors from the above "$doit $cpprog $src $dsttmp" command. - # - { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } && - { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } && - { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } && - { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } && - - # If -C, don't bother to copy if it wouldn't change the file. - if $copy_on_change && - old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && - new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && - set -f && - set X $old && old=:$2:$4:$5:$6 && - set X $new && new=:$2:$4:$5:$6 && - set +f && - test "$old" = "$new" && - $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1 - then - rm -f "$dsttmp" - else - # Rename the file to the real destination. - $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null || - - # The rename failed, perhaps because mv can't rename something else - # to itself, or perhaps because mv is so ancient that it does not - # support -f. - { - # Now remove or move aside any old file at destination location. - # We try this two ways since rm can't unlink itself on some - # systems and the destination file might be busy for other - # reasons. In this case, the final cleanup might fail but the new - # file should still install successfully. - { - test ! -f "$dst" || - $doit $rmcmd -f "$dst" 2>/dev/null || - { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && - { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; } - } || - { echo "$0: cannot unlink or rename $dst" >&2 - (exit 1); exit 1 - } - } && - - # Now rename the file to the real destination. 
- $doit $mvcmd "$dsttmp" "$dst" - } - fi || exit 1 - - trap '' 0 - fi -done - -# Local variables: -# eval: (add-hook 'write-file-hooks 'time-stamp) -# time-stamp-start: "scriptversion=" -# time-stamp-format: "%:y-%02m-%02d.%02H" -# time-stamp-time-zone: "UTC" -# time-stamp-end: "; # UTC" -# End: diff --git a/c/consumer-verification/libcurl.m4 b/c/consumer-verification/libcurl.m4 deleted file mode 100644 index 53d694d0a..000000000 --- a/c/consumer-verification/libcurl.m4 +++ /dev/null @@ -1,272 +0,0 @@ -#*************************************************************************** -# _ _ ____ _ -# Project ___| | | | _ \| | -# / __| | | | |_) | | -# | (__| |_| | _ <| |___ -# \___|\___/|_| \_\_____| -# -# Copyright (C) 2006, David Shaw -# -# This software is licensed as described in the file COPYING, which -# you should have received as part of this distribution. The terms -# are also available at https://curl.haxx.se/docs/copyright.html. -# -# You may opt to use, copy, modify, merge, publish, distribute and/or sell -# copies of the Software, and permit persons to whom the Software is -# furnished to do so, under the terms of the COPYING file. -# -# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY -# KIND, either express or implied. -# -########################################################################### -# LIBCURL_CHECK_CONFIG ([DEFAULT-ACTION], [MINIMUM-VERSION], -# [ACTION-IF-YES], [ACTION-IF-NO]) -# ---------------------------------------------------------- -# David Shaw May-09-2006 -# -# Checks for libcurl. DEFAULT-ACTION is the string yes or no to -# specify whether to default to --with-libcurl or --without-libcurl. -# If not supplied, DEFAULT-ACTION is yes. MINIMUM-VERSION is the -# minimum version of libcurl to accept. Pass the version as a regular -# version number like 7.10.1. If not supplied, any version is -# accepted. ACTION-IF-YES is a list of shell commands to run if -# libcurl was successfully found and passed the various tests. -# ACTION-IF-NO is a list of shell commands that are run otherwise. -# Note that using --without-libcurl does run ACTION-IF-NO. -# -# This macro #defines HAVE_LIBCURL if a working libcurl setup is -# found, and sets @LIBCURL@ and @LIBCURL_CPPFLAGS@ to the necessary -# values. Other useful defines are LIBCURL_FEATURE_xxx where xxx are -# the various features supported by libcurl, and LIBCURL_PROTOCOL_yyy -# where yyy are the various protocols supported by libcurl. Both xxx -# and yyy are capitalized. See the list of AH_TEMPLATEs at the top of -# the macro for the complete list of possible defines. Shell -# variables $libcurl_feature_xxx and $libcurl_protocol_yyy are also -# defined to 'yes' for those features and protocols that were found. -# Note that xxx and yyy keep the same capitalization as in the -# curl-config list (e.g. it's "HTTP" and not "http"). -# -# Users may override the detected values by doing something like: -# LIBCURL="-lcurl" LIBCURL_CPPFLAGS="-I/usr/myinclude" ./configure -# -# For the sake of sanity, this macro assumes that any libcurl that is -# found is after version 7.7.2, the first version that included the -# curl-config script. Note that it is very important for people -# packaging binary versions of libcurl to include this script! -# Without curl-config, we can only guess what protocols are available, -# or use curl_version_info to figure it out at runtime. 
- -AC_DEFUN([LIBCURL_CHECK_CONFIG], -[ - AH_TEMPLATE([LIBCURL_FEATURE_SSL],[Defined if libcurl supports SSL]) - AH_TEMPLATE([LIBCURL_FEATURE_KRB4],[Defined if libcurl supports KRB4]) - AH_TEMPLATE([LIBCURL_FEATURE_IPV6],[Defined if libcurl supports IPv6]) - AH_TEMPLATE([LIBCURL_FEATURE_LIBZ],[Defined if libcurl supports libz]) - AH_TEMPLATE([LIBCURL_FEATURE_ASYNCHDNS],[Defined if libcurl supports AsynchDNS]) - AH_TEMPLATE([LIBCURL_FEATURE_IDN],[Defined if libcurl supports IDN]) - AH_TEMPLATE([LIBCURL_FEATURE_SSPI],[Defined if libcurl supports SSPI]) - AH_TEMPLATE([LIBCURL_FEATURE_NTLM],[Defined if libcurl supports NTLM]) - - AH_TEMPLATE([LIBCURL_PROTOCOL_HTTP],[Defined if libcurl supports HTTP]) - AH_TEMPLATE([LIBCURL_PROTOCOL_HTTPS],[Defined if libcurl supports HTTPS]) - AH_TEMPLATE([LIBCURL_PROTOCOL_FTP],[Defined if libcurl supports FTP]) - AH_TEMPLATE([LIBCURL_PROTOCOL_FTPS],[Defined if libcurl supports FTPS]) - AH_TEMPLATE([LIBCURL_PROTOCOL_FILE],[Defined if libcurl supports FILE]) - AH_TEMPLATE([LIBCURL_PROTOCOL_TELNET],[Defined if libcurl supports TELNET]) - AH_TEMPLATE([LIBCURL_PROTOCOL_LDAP],[Defined if libcurl supports LDAP]) - AH_TEMPLATE([LIBCURL_PROTOCOL_DICT],[Defined if libcurl supports DICT]) - AH_TEMPLATE([LIBCURL_PROTOCOL_TFTP],[Defined if libcurl supports TFTP]) - AH_TEMPLATE([LIBCURL_PROTOCOL_RTSP],[Defined if libcurl supports RTSP]) - AH_TEMPLATE([LIBCURL_PROTOCOL_POP3],[Defined if libcurl supports POP3]) - AH_TEMPLATE([LIBCURL_PROTOCOL_IMAP],[Defined if libcurl supports IMAP]) - AH_TEMPLATE([LIBCURL_PROTOCOL_SMTP],[Defined if libcurl supports SMTP]) - - AC_ARG_WITH(libcurl, - AS_HELP_STRING([--with-libcurl=PREFIX],[look for the curl library in PREFIX/lib and headers in PREFIX/include]), - [_libcurl_with=$withval],[_libcurl_with=ifelse([$1],,[yes],[$1])]) - - if test "$_libcurl_with" != "no" ; then - - AC_PROG_AWK - - _libcurl_version_parse="eval $AWK '{split(\$NF,A,\".\"); X=256*256*A[[1]]+256*A[[2]]+A[[3]]; print X;}'" - - _libcurl_try_link=yes - - if test -d "$_libcurl_with" ; then - LIBCURL_CPPFLAGS="-I$withval/include" - _libcurl_ldflags="-L$withval/lib" - AC_PATH_PROG([_libcurl_config],[curl-config],[], - ["$withval/bin"]) - else - AC_PATH_PROG([_libcurl_config],[curl-config],[],[$PATH]) - fi - - if test x$_libcurl_config != "x" ; then - AC_CACHE_CHECK([for the version of libcurl], - [libcurl_cv_lib_curl_version], - [libcurl_cv_lib_curl_version=`$_libcurl_config --version | $AWK '{print $[]2}'`]) - - _libcurl_version=`echo $libcurl_cv_lib_curl_version | $_libcurl_version_parse` - _libcurl_wanted=`echo ifelse([$2],,[0],[$2]) | $_libcurl_version_parse` - - if test $_libcurl_wanted -gt 0 ; then - AC_CACHE_CHECK([for libcurl >= version $2], - [libcurl_cv_lib_version_ok], - [ - if test $_libcurl_version -ge $_libcurl_wanted ; then - libcurl_cv_lib_version_ok=yes - else - libcurl_cv_lib_version_ok=no - fi - ]) - fi - - if test $_libcurl_wanted -eq 0 || test x$libcurl_cv_lib_version_ok = xyes ; then - if test x"$LIBCURL_CPPFLAGS" = "x" ; then - LIBCURL_CPPFLAGS=`$_libcurl_config --cflags` - fi - if test x"$LIBCURL" = "x" ; then - LIBCURL=`$_libcurl_config --libs` - - # This is so silly, but Apple actually has a bug in their - # curl-config script. Fixed in Tiger, but there are still - # lots of Panther installs around. 
- case "${host}" in - powerpc-apple-darwin7*) - LIBCURL=`echo $LIBCURL | sed -e 's|-arch i386||g'` - ;; - esac - fi - - # All curl-config scripts support --feature - _libcurl_features=`$_libcurl_config --feature` - - # Is it modern enough to have --protocols? (7.12.4) - if test $_libcurl_version -ge 461828 ; then - _libcurl_protocols=`$_libcurl_config --protocols` - fi - else - _libcurl_try_link=no - fi - - unset _libcurl_wanted - fi - - if test $_libcurl_try_link = yes ; then - - # we didn't find curl-config, so let's see if the user-supplied - # link line (or failing that, "-lcurl") is enough. - LIBCURL=${LIBCURL-"$_libcurl_ldflags -lcurl"} - - AC_CACHE_CHECK([whether libcurl is usable], - [libcurl_cv_lib_curl_usable], - [ - _libcurl_save_cppflags=$CPPFLAGS - CPPFLAGS="$LIBCURL_CPPFLAGS $CPPFLAGS" - _libcurl_save_libs=$LIBS - LIBS="$LIBCURL $LIBS" - - AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include ]],[[ -/* Try and use a few common options to force a failure if we are - missing symbols or can't link. */ -int x; -curl_easy_setopt(NULL,CURLOPT_URL,NULL); -x=CURL_ERROR_SIZE; -x=CURLOPT_WRITEFUNCTION; -x=CURLOPT_WRITEDATA; -x=CURLOPT_ERRORBUFFER; -x=CURLOPT_STDERR; -x=CURLOPT_VERBOSE; -if (x) {;} -]])],libcurl_cv_lib_curl_usable=yes,libcurl_cv_lib_curl_usable=no) - - CPPFLAGS=$_libcurl_save_cppflags - LIBS=$_libcurl_save_libs - unset _libcurl_save_cppflags - unset _libcurl_save_libs - ]) - - if test $libcurl_cv_lib_curl_usable = yes ; then - - # Does curl_free() exist in this version of libcurl? - # If not, fake it with free() - - _libcurl_save_cppflags=$CPPFLAGS - CPPFLAGS="$CPPFLAGS $LIBCURL_CPPFLAGS" - _libcurl_save_libs=$LIBS - LIBS="$LIBS $LIBCURL" - - AC_CHECK_FUNC(curl_free,, - AC_DEFINE(curl_free,free, - [Define curl_free() as free() if our version of curl lacks curl_free.])) - - CPPFLAGS=$_libcurl_save_cppflags - LIBS=$_libcurl_save_libs - unset _libcurl_save_cppflags - unset _libcurl_save_libs - - AC_DEFINE(HAVE_LIBCURL,1, - [Define to 1 if you have a functional curl library.]) - AC_SUBST(LIBCURL_CPPFLAGS) - AC_SUBST(LIBCURL) - - for _libcurl_feature in $_libcurl_features ; do - AC_DEFINE_UNQUOTED(AS_TR_CPP(libcurl_feature_$_libcurl_feature),[1]) - eval AS_TR_SH(libcurl_feature_$_libcurl_feature)=yes - done - - if test "x$_libcurl_protocols" = "x" ; then - - # We don't have --protocols, so just assume that all - # protocols are available - _libcurl_protocols="HTTP FTP FILE TELNET LDAP DICT TFTP" - - if test x$libcurl_feature_SSL = xyes ; then - _libcurl_protocols="$_libcurl_protocols HTTPS" - - # FTPS wasn't standards-compliant until version - # 7.11.0 (0x070b00 == 461568) - if test $_libcurl_version -ge 461568; then - _libcurl_protocols="$_libcurl_protocols FTPS" - fi - fi - - # RTSP, IMAP, POP3 and SMTP were added in - # 7.20.0 (0x071400 == 463872) - if test $_libcurl_version -ge 463872; then - _libcurl_protocols="$_libcurl_protocols RTSP IMAP POP3 SMTP" - fi - fi - - for _libcurl_protocol in $_libcurl_protocols ; do - AC_DEFINE_UNQUOTED(AS_TR_CPP(libcurl_protocol_$_libcurl_protocol),[1]) - eval AS_TR_SH(libcurl_protocol_$_libcurl_protocol)=yes - done - else - unset LIBCURL - unset LIBCURL_CPPFLAGS - fi - fi - - unset _libcurl_try_link - unset _libcurl_version_parse - unset _libcurl_config - unset _libcurl_feature - unset _libcurl_features - unset _libcurl_protocol - unset _libcurl_protocols - unset _libcurl_version - unset _libcurl_ldflags - fi - - if test x$_libcurl_with = xno || test x$libcurl_cv_lib_curl_usable != xyes ; then - # This is the IF-NO path - ifelse([$4],,:,[$4]) - 
else - # This is the IF-YES path - ifelse([$3],,:,[$3]) - fi - - unset _libcurl_with -])dnl diff --git a/c/consumer-verification/missing b/c/consumer-verification/missing deleted file mode 100755 index f62bbae30..000000000 --- a/c/consumer-verification/missing +++ /dev/null @@ -1,215 +0,0 @@ -#! /bin/sh -# Common wrapper for a few potentially missing GNU programs. - -scriptversion=2013-10-28.13; # UTC - -# Copyright (C) 1996-2014 Free Software Foundation, Inc. -# Originally written by Fran,cois Pinard , 1996. - -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2, or (at your option) -# any later version. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. - -if test $# -eq 0; then - echo 1>&2 "Try '$0 --help' for more information" - exit 1 -fi - -case $1 in - - --is-lightweight) - # Used by our autoconf macros to check whether the available missing - # script is modern enough. - exit 0 - ;; - - --run) - # Back-compat with the calling convention used by older automake. - shift - ;; - - -h|--h|--he|--hel|--help) - echo "\ -$0 [OPTION]... PROGRAM [ARGUMENT]... - -Run 'PROGRAM [ARGUMENT]...', returning a proper advice when this fails due -to PROGRAM being missing or too old. - -Options: - -h, --help display this help and exit - -v, --version output version information and exit - -Supported PROGRAM values: - aclocal autoconf autoheader autom4te automake makeinfo - bison yacc flex lex help2man - -Version suffixes to PROGRAM as well as the prefixes 'gnu-', 'gnu', and -'g' are ignored when checking the name. - -Send bug reports to ." - exit $? - ;; - - -v|--v|--ve|--ver|--vers|--versi|--versio|--version) - echo "missing $scriptversion (GNU Automake)" - exit $? - ;; - - -*) - echo 1>&2 "$0: unknown '$1' option" - echo 1>&2 "Try '$0 --help' for more information" - exit 1 - ;; - -esac - -# Run the given program, remember its exit status. -"$@"; st=$? - -# If it succeeded, we are done. -test $st -eq 0 && exit 0 - -# Also exit now if we it failed (or wasn't found), and '--version' was -# passed; such an option is passed most likely to detect whether the -# program is present and works. -case $2 in --version|--help) exit $st;; esac - -# Exit code 63 means version mismatch. This often happens when the user -# tries to use an ancient version of a tool on a file that requires a -# minimum version. -if test $st -eq 63; then - msg="probably too old" -elif test $st -eq 127; then - # Program was missing. - msg="missing on your system" -else - # Program was found and executed, but failed. Give up. 
- exit $st -fi - -perl_URL=http://www.perl.org/ -flex_URL=http://flex.sourceforge.net/ -gnu_software_URL=http://www.gnu.org/software - -program_details () -{ - case $1 in - aclocal|automake) - echo "The '$1' program is part of the GNU Automake package:" - echo "<$gnu_software_URL/automake>" - echo "It also requires GNU Autoconf, GNU m4 and Perl in order to run:" - echo "<$gnu_software_URL/autoconf>" - echo "<$gnu_software_URL/m4/>" - echo "<$perl_URL>" - ;; - autoconf|autom4te|autoheader) - echo "The '$1' program is part of the GNU Autoconf package:" - echo "<$gnu_software_URL/autoconf/>" - echo "It also requires GNU m4 and Perl in order to run:" - echo "<$gnu_software_URL/m4/>" - echo "<$perl_URL>" - ;; - esac -} - -give_advice () -{ - # Normalize program name to check for. - normalized_program=`echo "$1" | sed ' - s/^gnu-//; t - s/^gnu//; t - s/^g//; t'` - - printf '%s\n' "'$1' is $msg." - - configure_deps="'configure.ac' or m4 files included by 'configure.ac'" - case $normalized_program in - autoconf*) - echo "You should only need it if you modified 'configure.ac'," - echo "or m4 files included by it." - program_details 'autoconf' - ;; - autoheader*) - echo "You should only need it if you modified 'acconfig.h' or" - echo "$configure_deps." - program_details 'autoheader' - ;; - automake*) - echo "You should only need it if you modified 'Makefile.am' or" - echo "$configure_deps." - program_details 'automake' - ;; - aclocal*) - echo "You should only need it if you modified 'acinclude.m4' or" - echo "$configure_deps." - program_details 'aclocal' - ;; - autom4te*) - echo "You might have modified some maintainer files that require" - echo "the 'autom4te' program to be rebuilt." - program_details 'autom4te' - ;; - bison*|yacc*) - echo "You should only need it if you modified a '.y' file." - echo "You may want to install the GNU Bison package:" - echo "<$gnu_software_URL/bison/>" - ;; - lex*|flex*) - echo "You should only need it if you modified a '.l' file." - echo "You may want to install the Fast Lexical Analyzer package:" - echo "<$flex_URL>" - ;; - help2man*) - echo "You should only need it if you modified a dependency" \ - "of a man page." - echo "You may want to install the GNU Help2man package:" - echo "<$gnu_software_URL/help2man/>" - ;; - makeinfo*) - echo "You should only need it if you modified a '.texi' file, or" - echo "any other file indirectly affecting the aspect of the manual." - echo "You might want to install the Texinfo package:" - echo "<$gnu_software_URL/texinfo/>" - echo "The spurious makeinfo call might also be the consequence of" - echo "using a buggy 'make' (AIX, DU, IRIX), in which case you might" - echo "want to install GNU make:" - echo "<$gnu_software_URL/make/>" - ;; - *) - echo "You might have modified some files without having the proper" - echo "tools for further handling them. Check the 'README' file, it" - echo "often tells you about the needed prerequisites for installing" - echo "this package. You may also peek at any GNU archive site, in" - echo "case some other package contains this missing '$1' program." - ;; - esac -} - -give_advice "$1" | sed -e '1s/^/WARNING: /' \ - -e '2,$s/^/ /' >&2 - -# Propagate the correct exit status (expected to be 127 for a program -# not found, 63 for a program that failed due to version mismatch). 
-exit $st - -# Local variables: -# eval: (add-hook 'write-file-hooks 'time-stamp) -# time-stamp-start: "scriptversion=" -# time-stamp-format: "%:y-%02m-%02d.%02H" -# time-stamp-time-zone: "UTC" -# time-stamp-end: "; # UTC" -# End: diff --git a/c/consumer-verification/src/Makefile b/c/consumer-verification/src/Makefile deleted file mode 100644 index 83698e7e0..000000000 --- a/c/consumer-verification/src/Makefile +++ /dev/null @@ -1,575 +0,0 @@ -# Makefile.in generated by automake 1.16.3 from Makefile.am. -# src/Makefile. Generated from Makefile.in by configure. - -# Copyright (C) 1994-2020 Free Software Foundation, Inc. - -# This Makefile.in is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY, to the extent permitted by law; without -# even the implied warranty of MERCHANTABILITY or FITNESS FOR A -# PARTICULAR PURPOSE. - - - - -am__is_gnu_make = { \ - if test -z '$(MAKELEVEL)'; then \ - false; \ - elif test -n '$(MAKE_HOST)'; then \ - true; \ - elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ - true; \ - else \ - false; \ - fi; \ -} -am__make_running_with_option = \ - case $${target_option-} in \ - ?) ;; \ - *) echo "am__make_running_with_option: internal error: invalid" \ - "target option '$${target_option-}' specified" >&2; \ - exit 1;; \ - esac; \ - has_opt=no; \ - sane_makeflags=$$MAKEFLAGS; \ - if $(am__is_gnu_make); then \ - sane_makeflags=$$MFLAGS; \ - else \ - case $$MAKEFLAGS in \ - *\\[\ \ ]*) \ - bs=\\; \ - sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ - | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ - esac; \ - fi; \ - skip_next=no; \ - strip_trailopt () \ - { \ - flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ - }; \ - for flg in $$sane_makeflags; do \ - test $$skip_next = yes && { skip_next=no; continue; }; \ - case $$flg in \ - *=*|--*) continue;; \ - -*I) strip_trailopt 'I'; skip_next=yes;; \ - -*I?*) strip_trailopt 'I';; \ - -*O) strip_trailopt 'O'; skip_next=yes;; \ - -*O?*) strip_trailopt 'O';; \ - -*l) strip_trailopt 'l'; skip_next=yes;; \ - -*l?*) strip_trailopt 'l';; \ - -[dEDm]) skip_next=yes;; \ - -[JT]) skip_next=yes;; \ - esac; \ - case $$flg in \ - *$$target_option*) has_opt=yes; break;; \ - esac; \ - done; \ - test $$has_opt = yes -am__make_dryrun = (target_option=n; $(am__make_running_with_option)) -am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) -pkgdatadir = $(datadir)/consumer-verification -pkgincludedir = $(includedir)/consumer-verification -pkglibdir = $(libdir)/consumer-verification -pkglibexecdir = $(libexecdir)/consumer-verification -am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd -install_sh_DATA = $(install_sh) -c -m 644 -install_sh_PROGRAM = $(install_sh) -c -install_sh_SCRIPT = $(install_sh) -c -INSTALL_HEADER = $(INSTALL_DATA) -transform = $(program_transform_name) -NORMAL_INSTALL = : -PRE_INSTALL = : -POST_INSTALL = : -NORMAL_UNINSTALL = : -PRE_UNINSTALL = : -POST_UNINSTALL = : -bin_PROGRAMS = consumer-verification$(EXEEXT) -subdir = src -ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/libcurl.m4 \ - $(top_srcdir)/configure.ac -am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ - $(ACLOCAL_M4) -DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) -mkinstalldirs = $(install_sh) -d -CONFIG_HEADER = $(top_builddir)/config.h 
-CONFIG_CLEAN_FILES = -CONFIG_CLEAN_VPATH_FILES = -am__installdirs = "$(DESTDIR)$(bindir)" -PROGRAMS = $(bin_PROGRAMS) -am_consumer_verification_OBJECTS = main.$(OBJEXT) -consumer_verification_OBJECTS = $(am_consumer_verification_OBJECTS) -am__DEPENDENCIES_1 = -consumer_verification_DEPENDENCIES = $(am__DEPENDENCIES_1) -AM_V_P = $(am__v_P_$(V)) -am__v_P_ = $(am__v_P_$(AM_DEFAULT_VERBOSITY)) -am__v_P_0 = false -am__v_P_1 = : -AM_V_GEN = $(am__v_GEN_$(V)) -am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY)) -am__v_GEN_0 = @echo " GEN " $@; -am__v_GEN_1 = -AM_V_at = $(am__v_at_$(V)) -am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY)) -am__v_at_0 = @ -am__v_at_1 = -DEFAULT_INCLUDES = -I. -I$(top_builddir) -depcomp = $(SHELL) $(top_srcdir)/depcomp -am__maybe_remake_depfiles = depfiles -am__depfiles_remade = ./$(DEPDIR)/main.Po -am__mv = mv -f -COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ - $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -AM_V_CC = $(am__v_CC_$(V)) -am__v_CC_ = $(am__v_CC_$(AM_DEFAULT_VERBOSITY)) -am__v_CC_0 = @echo " CC " $@; -am__v_CC_1 = -CCLD = $(CC) -LINK = $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ -AM_V_CCLD = $(am__v_CCLD_$(V)) -am__v_CCLD_ = $(am__v_CCLD_$(AM_DEFAULT_VERBOSITY)) -am__v_CCLD_0 = @echo " CCLD " $@; -am__v_CCLD_1 = -SOURCES = $(consumer_verification_SOURCES) -DIST_SOURCES = $(consumer_verification_SOURCES) -am__can_run_installinfo = \ - case $$AM_UPDATE_INFO_DIR in \ - n|no|NO) false;; \ - *) (install-info --version) >/dev/null 2>&1;; \ - esac -am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) -# Read a list of newline-separated strings from the standard input, -# and print each of them once, without duplicates. Input order is -# *not* preserved. -am__uniquify_input = $(AWK) '\ - BEGIN { nonempty = 0; } \ - { items[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in items) print i; }; } \ -' -# Make sure the list of sources is unique. This is necessary because, -# e.g., the same source file might be shared among _SOURCES variables -# for different programs/libraries. 
-am__define_uniq_tagged_files = \ - list='$(am__tagged_files)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | $(am__uniquify_input)` -ETAGS = etags -CTAGS = ctags -am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/depcomp -DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) -ACLOCAL = ${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' aclocal-1.16 -AMTAR = $${TAR-tar} -AM_DEFAULT_VERBOSITY = 1 -AUTOCONF = ${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' autoconf -AUTOHEADER = ${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' autoheader -AUTOMAKE = ${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' automake-1.16 -AWK = gawk -CC = gcc -CCDEPMODE = depmode=gcc3 -CFLAGS = -g -O2 -CPPFLAGS = -CYGPATH_W = echo -DEFS = -DHAVE_CONFIG_H -DEPDIR = .deps -ECHO_C = -ECHO_N = -n -ECHO_T = -EXEEXT = -INSTALL = /usr/bin/install -c -INSTALL_DATA = ${INSTALL} -m 644 -INSTALL_PROGRAM = ${INSTALL} -INSTALL_SCRIPT = ${INSTALL} -INSTALL_STRIP_PROGRAM = $(install_sh) -c -s -LDFLAGS = -LIBCURL = -L/usr/local/lib -lcurl -LIBCURL_CPPFLAGS = -I/usr/local/include -LIBOBJS = -LIBS = -LTLIBOBJS = -MAKEINFO = ${SHELL} '/home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/missing' makeinfo -MKDIR_P = /usr/bin/mkdir -p -OBJEXT = o -PACKAGE = consumer-verification -PACKAGE_BUGREPORT = bug-automake@gnu.org -PACKAGE_NAME = consumer-verification -PACKAGE_STRING = consumer-verification 0.0.0 -PACKAGE_TARNAME = consumer-verification -PACKAGE_URL = -PACKAGE_VERSION = 0.0.0 -PATH_SEPARATOR = : -SET_MAKE = -SHELL = /bin/bash -STRIP = -VERSION = 0.0.0 -_libcurl_config = -abs_builddir = /home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/src -abs_srcdir = /home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/src -abs_top_builddir = /home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification -abs_top_srcdir = /home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification -ac_ct_CC = gcc -am__include = include -am__leading_dot = . -am__quote = -am__tar = $${TAR-tar} chof - "$$tardir" -am__untar = $${TAR-tar} xf - -bindir = ${exec_prefix}/bin -build_alias = -builddir = . -datadir = ${datarootdir} -datarootdir = ${prefix}/share -docdir = ${datarootdir}/doc/${PACKAGE_TARNAME} -dvidir = ${docdir} -exec_prefix = ${prefix} -host_alias = -htmldir = ${docdir} -includedir = ${prefix}/include -infodir = ${datarootdir}/info -install_sh = ${SHELL} /home/ronald/Development/Projects/Pact/pact-reference/c/consumer-verification/install-sh -libdir = ${exec_prefix}/lib -libexecdir = ${exec_prefix}/libexec -localedir = ${datarootdir}/locale -localstatedir = ${prefix}/var -mandir = ${datarootdir}/man -mkdir_p = $(MKDIR_P) -oldincludedir = /usr/include -pdfdir = ${docdir} -prefix = /usr/local -program_transform_name = s,x,x, -psdir = ${docdir} -runstatedir = ${localstatedir}/run -sbindir = ${exec_prefix}/sbin -sharedstatedir = ${prefix}/com -srcdir = . -sysconfdir = ${prefix}/etc -target_alias = -top_build_prefix = ../ -top_builddir = .. -top_srcdir = .. 
-consumer_verification_SOURCES = main.c -AM_CPPFLAGS = ${LIBCURL_CPPFLAGS} -I../../../rust/pact_ffi/include -consumer_verification_LDADD = ${LIBCURL} -ldl -all: all-am - -.SUFFIXES: -.SUFFIXES: .c .o .obj -$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) - @for dep in $?; do \ - case '$(am__configure_deps)' in \ - *$$dep*) \ - ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ - && { if test -f $@; then exit 0; else break; fi; }; \ - exit 1;; \ - esac; \ - done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/Makefile'; \ - $(am__cd) $(top_srcdir) && \ - $(AUTOMAKE) --foreign src/Makefile -Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status - @case '$?' in \ - *config.status*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ - *) \ - echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \ - cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \ - esac; - -$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh - -$(top_srcdir)/configure: $(am__configure_deps) - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh -$(ACLOCAL_M4): $(am__aclocal_m4_deps) - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh -$(am__aclocal_m4_deps): -install-binPROGRAMS: $(bin_PROGRAMS) - @$(NORMAL_INSTALL) - @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ - if test -n "$$list"; then \ - echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \ - $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \ - fi; \ - for p in $$list; do echo "$$p $$p"; done | \ - sed 's/$(EXEEXT)$$//' | \ - while read p p1; do if test -f $$p \ - ; then echo "$$p"; echo "$$p"; else :; fi; \ - done | \ - sed -e 'p;s,.*/,,;n;h' \ - -e 's|.*|.|' \ - -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ - sed 'N;N;N;s,\n, ,g' | \ - $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ - { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ - if ($$2 == $$4) files[d] = files[d] " " $$1; \ - else { print "f", $$3 "/" $$4, $$1; } } \ - END { for (d in files) print "f", d, files[d] }' | \ - while read type dir files; do \ - if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ - test -z "$$files" || { \ - echo " $(INSTALL_PROGRAM_ENV) $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \ - $(INSTALL_PROGRAM_ENV) $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ - } \ - ; done - -uninstall-binPROGRAMS: - @$(NORMAL_UNINSTALL) - @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ - files=`for p in $$list; do echo "$$p"; done | \ - sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ - -e 's/$$/$(EXEEXT)/' \ - `; \ - test -n "$$list" || exit 0; \ - echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ - cd "$(DESTDIR)$(bindir)" && rm -f $$files - -clean-binPROGRAMS: - -test -z "$(bin_PROGRAMS)" || rm -f $(bin_PROGRAMS) - -consumer-verification$(EXEEXT): $(consumer_verification_OBJECTS) $(consumer_verification_DEPENDENCIES) $(EXTRA_consumer_verification_DEPENDENCIES) - @rm -f consumer-verification$(EXEEXT) - $(AM_V_CCLD)$(LINK) $(consumer_verification_OBJECTS) $(consumer_verification_LDADD) $(LIBS) - -mostlyclean-compile: - -rm -f *.$(OBJEXT) - -distclean-compile: - -rm -f *.tab.c - -include ./$(DEPDIR)/main.Po # am--include-marker - -$(am__depfiles_remade): - @$(MKDIR_P) $(@D) - @echo '# dummy' >$@-t && $(am__mv) $@-t $@ - -am--depfiles: $(am__depfiles_remade) - -.c.o: - 
$(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< - $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po -# $(AM_V_CC)source='$<' object='$@' libtool=no \ -# DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \ -# $(AM_V_CC_no)$(COMPILE) -c -o $@ $< - -.c.obj: - $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` - $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po -# $(AM_V_CC)source='$<' object='$@' libtool=no \ -# DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \ -# $(AM_V_CC_no)$(COMPILE) -c -o $@ `$(CYGPATH_W) '$<'` - -ID: $(am__tagged_files) - $(am__define_uniq_tagged_files); mkid -fID $$unique -tags: tags-am -TAGS: tags - -tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) - set x; \ - here=`pwd`; \ - $(am__define_uniq_tagged_files); \ - shift; \ - if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ - test -n "$$unique" || unique=$$empty_fix; \ - if test $$# -gt 0; then \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - "$$@" $$unique; \ - else \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - $$unique; \ - fi; \ - fi -ctags: ctags-am - -CTAGS: ctags -ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) - $(am__define_uniq_tagged_files); \ - test -z "$(CTAGS_ARGS)$$unique" \ - || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ - $$unique - -GTAGS: - here=`$(am__cd) $(top_builddir) && pwd` \ - && $(am__cd) $(top_srcdir) \ - && gtags -i $(GTAGS_ARGS) "$$here" -cscopelist: cscopelist-am - -cscopelist-am: $(am__tagged_files) - list='$(am__tagged_files)'; \ - case "$(srcdir)" in \ - [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ - *) sdir=$(subdir)/$(srcdir) ;; \ - esac; \ - for i in $$list; do \ - if test -f "$$i"; then \ - echo "$(subdir)/$$i"; \ - else \ - echo "$$sdir/$$i"; \ - fi; \ - done >> $(top_builddir)/cscope.files - -distclean-tags: - -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags - -distdir: $(BUILT_SOURCES) - $(MAKE) $(AM_MAKEFLAGS) distdir-am - -distdir-am: $(DISTFILES) - @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - list='$(DISTFILES)'; \ - dist_files=`for file in $$list; do echo $$file; done | \ - sed -e "s|^$$srcdirstrip/||;t" \ - -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ - case $$dist_files in \ - */*) $(MKDIR_P) `echo "$$dist_files" | \ - sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ - sort -u` ;; \ - esac; \ - for file in $$dist_files; do \ - if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ - if test -d $$d/$$file; then \ - dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ - if test -d "$(distdir)/$$file"; then \ - find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ - fi; \ - if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ - find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ - fi; \ - cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ - else \ - test -f "$(distdir)/$$file" \ - || cp -p $$d/$$file "$(distdir)/$$file" \ - || exit 1; \ - fi; \ - done -check-am: all-am -check: check-am -all-am: Makefile $(PROGRAMS) -installdirs: - for dir in "$(DESTDIR)$(bindir)"; do \ - test -z "$$dir" || $(MKDIR_P) "$$dir"; \ - done -install: install-am -install-exec: install-exec-am -install-data: install-data-am -uninstall: uninstall-am - -install-am: all-am - @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am - -installcheck: installcheck-am -install-strip: - if test -z '$(STRIP)'; then \ - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - install; \ - else \ - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ - fi -mostlyclean-generic: - -clean-generic: - -distclean-generic: - -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) - -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) - -maintainer-clean-generic: - @echo "This command is intended for maintainers to use" - @echo "it deletes files that may require special tools to rebuild." -clean: clean-am - -clean-am: clean-binPROGRAMS clean-generic mostlyclean-am - -distclean: distclean-am - -rm -f ./$(DEPDIR)/main.Po - -rm -f Makefile -distclean-am: clean-am distclean-compile distclean-generic \ - distclean-tags - -dvi: dvi-am - -dvi-am: - -html: html-am - -html-am: - -info: info-am - -info-am: - -install-data-am: - -install-dvi: install-dvi-am - -install-dvi-am: - -install-exec-am: install-binPROGRAMS - -install-html: install-html-am - -install-html-am: - -install-info: install-info-am - -install-info-am: - -install-man: - -install-pdf: install-pdf-am - -install-pdf-am: - -install-ps: install-ps-am - -install-ps-am: - -installcheck-am: - -maintainer-clean: maintainer-clean-am - -rm -f ./$(DEPDIR)/main.Po - -rm -f Makefile -maintainer-clean-am: distclean-am maintainer-clean-generic - -mostlyclean: mostlyclean-am - -mostlyclean-am: mostlyclean-compile mostlyclean-generic - -pdf: pdf-am - -pdf-am: - -ps: ps-am - -ps-am: - -uninstall-am: uninstall-binPROGRAMS - -.MAKE: install-am install-strip - -.PHONY: CTAGS GTAGS TAGS all all-am am--depfiles check check-am clean \ - clean-binPROGRAMS clean-generic cscopelist-am ctags ctags-am \ - distclean distclean-compile distclean-generic distclean-tags \ - distdir dvi dvi-am html html-am info info-am install \ - install-am install-binPROGRAMS install-data install-data-am \ - install-dvi install-dvi-am install-exec install-exec-am \ - install-html install-html-am install-info install-info-am \ - install-man install-pdf install-pdf-am install-ps \ - install-ps-am install-strip installcheck installcheck-am \ - installdirs maintainer-clean maintainer-clean-generic \ - mostlyclean mostlyclean-compile mostlyclean-generic pdf pdf-am \ - ps ps-am tags tags-am uninstall uninstall-am \ - uninstall-binPROGRAMS - -.PRECIOUS: Makefile - - -# Tell versions [3.59,3.63) of GNU make to not export all variables. -# Otherwise a system limit (for SysV at least) may be exceeded. 
-.NOEXPORT: diff --git a/c/consumer-verification/src/Makefile.am b/c/consumer-verification/src/Makefile.am deleted file mode 100644 index c1bf80a46..000000000 --- a/c/consumer-verification/src/Makefile.am +++ /dev/null @@ -1,4 +0,0 @@ -bin_PROGRAMS = consumer-verification -consumer_verification_SOURCES = main.c -AM_CPPFLAGS = ${LIBCURL_CPPFLAGS} -I../../../rust/pact_ffi/include -consumer_verification_LDADD = ${LIBCURL} -ldl diff --git a/c/consumer-verification/src/Makefile.in b/c/consumer-verification/src/Makefile.in deleted file mode 100644 index 753ba0296..000000000 --- a/c/consumer-verification/src/Makefile.in +++ /dev/null @@ -1,575 +0,0 @@ -# Makefile.in generated by automake 1.16.3 from Makefile.am. -# @configure_input@ - -# Copyright (C) 1994-2020 Free Software Foundation, Inc. - -# This Makefile.in is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY, to the extent permitted by law; without -# even the implied warranty of MERCHANTABILITY or FITNESS FOR A -# PARTICULAR PURPOSE. - -@SET_MAKE@ - -VPATH = @srcdir@ -am__is_gnu_make = { \ - if test -z '$(MAKELEVEL)'; then \ - false; \ - elif test -n '$(MAKE_HOST)'; then \ - true; \ - elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ - true; \ - else \ - false; \ - fi; \ -} -am__make_running_with_option = \ - case $${target_option-} in \ - ?) ;; \ - *) echo "am__make_running_with_option: internal error: invalid" \ - "target option '$${target_option-}' specified" >&2; \ - exit 1;; \ - esac; \ - has_opt=no; \ - sane_makeflags=$$MAKEFLAGS; \ - if $(am__is_gnu_make); then \ - sane_makeflags=$$MFLAGS; \ - else \ - case $$MAKEFLAGS in \ - *\\[\ \ ]*) \ - bs=\\; \ - sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ - | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ - esac; \ - fi; \ - skip_next=no; \ - strip_trailopt () \ - { \ - flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ - }; \ - for flg in $$sane_makeflags; do \ - test $$skip_next = yes && { skip_next=no; continue; }; \ - case $$flg in \ - *=*|--*) continue;; \ - -*I) strip_trailopt 'I'; skip_next=yes;; \ - -*I?*) strip_trailopt 'I';; \ - -*O) strip_trailopt 'O'; skip_next=yes;; \ - -*O?*) strip_trailopt 'O';; \ - -*l) strip_trailopt 'l'; skip_next=yes;; \ - -*l?*) strip_trailopt 'l';; \ - -[dEDm]) skip_next=yes;; \ - -[JT]) skip_next=yes;; \ - esac; \ - case $$flg in \ - *$$target_option*) has_opt=yes; break;; \ - esac; \ - done; \ - test $$has_opt = yes -am__make_dryrun = (target_option=n; $(am__make_running_with_option)) -am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) -pkgdatadir = $(datadir)/@PACKAGE@ -pkgincludedir = $(includedir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ -pkglibexecdir = $(libexecdir)/@PACKAGE@ -am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd -install_sh_DATA = $(install_sh) -c -m 644 -install_sh_PROGRAM = $(install_sh) -c -install_sh_SCRIPT = $(install_sh) -c -INSTALL_HEADER = $(INSTALL_DATA) -transform = $(program_transform_name) -NORMAL_INSTALL = : -PRE_INSTALL = : -POST_INSTALL = : -NORMAL_UNINSTALL = : -PRE_UNINSTALL = : -POST_UNINSTALL = : -bin_PROGRAMS = consumer-verification$(EXEEXT) -subdir = src -ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/libcurl.m4 \ - $(top_srcdir)/configure.ac -am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ - $(ACLOCAL_M4) 
-DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) -mkinstalldirs = $(install_sh) -d -CONFIG_HEADER = $(top_builddir)/config.h -CONFIG_CLEAN_FILES = -CONFIG_CLEAN_VPATH_FILES = -am__installdirs = "$(DESTDIR)$(bindir)" -PROGRAMS = $(bin_PROGRAMS) -am_consumer_verification_OBJECTS = main.$(OBJEXT) -consumer_verification_OBJECTS = $(am_consumer_verification_OBJECTS) -am__DEPENDENCIES_1 = -consumer_verification_DEPENDENCIES = $(am__DEPENDENCIES_1) -AM_V_P = $(am__v_P_@AM_V@) -am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) -am__v_P_0 = false -am__v_P_1 = : -AM_V_GEN = $(am__v_GEN_@AM_V@) -am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) -am__v_GEN_0 = @echo " GEN " $@; -am__v_GEN_1 = -AM_V_at = $(am__v_at_@AM_V@) -am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) -am__v_at_0 = @ -am__v_at_1 = -DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) -depcomp = $(SHELL) $(top_srcdir)/depcomp -am__maybe_remake_depfiles = depfiles -am__depfiles_remade = ./$(DEPDIR)/main.Po -am__mv = mv -f -COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ - $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -AM_V_CC = $(am__v_CC_@AM_V@) -am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) -am__v_CC_0 = @echo " CC " $@; -am__v_CC_1 = -CCLD = $(CC) -LINK = $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ -AM_V_CCLD = $(am__v_CCLD_@AM_V@) -am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) -am__v_CCLD_0 = @echo " CCLD " $@; -am__v_CCLD_1 = -SOURCES = $(consumer_verification_SOURCES) -DIST_SOURCES = $(consumer_verification_SOURCES) -am__can_run_installinfo = \ - case $$AM_UPDATE_INFO_DIR in \ - n|no|NO) false;; \ - *) (install-info --version) >/dev/null 2>&1;; \ - esac -am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) -# Read a list of newline-separated strings from the standard input, -# and print each of them once, without duplicates. Input order is -# *not* preserved. -am__uniquify_input = $(AWK) '\ - BEGIN { nonempty = 0; } \ - { items[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in items) print i; }; } \ -' -# Make sure the list of sources is unique. This is necessary because, -# e.g., the same source file might be shared among _SOURCES variables -# for different programs/libraries. 
-am__define_uniq_tagged_files = \ - list='$(am__tagged_files)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | $(am__uniquify_input)` -ETAGS = etags -CTAGS = ctags -am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/depcomp -DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) -ACLOCAL = @ACLOCAL@ -AMTAR = @AMTAR@ -AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ -AUTOCONF = @AUTOCONF@ -AUTOHEADER = @AUTOHEADER@ -AUTOMAKE = @AUTOMAKE@ -AWK = @AWK@ -CC = @CC@ -CCDEPMODE = @CCDEPMODE@ -CFLAGS = @CFLAGS@ -CPPFLAGS = @CPPFLAGS@ -CYGPATH_W = @CYGPATH_W@ -DEFS = @DEFS@ -DEPDIR = @DEPDIR@ -ECHO_C = @ECHO_C@ -ECHO_N = @ECHO_N@ -ECHO_T = @ECHO_T@ -EXEEXT = @EXEEXT@ -INSTALL = @INSTALL@ -INSTALL_DATA = @INSTALL_DATA@ -INSTALL_PROGRAM = @INSTALL_PROGRAM@ -INSTALL_SCRIPT = @INSTALL_SCRIPT@ -INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -LDFLAGS = @LDFLAGS@ -LIBCURL = @LIBCURL@ -LIBCURL_CPPFLAGS = @LIBCURL_CPPFLAGS@ -LIBOBJS = @LIBOBJS@ -LIBS = @LIBS@ -LTLIBOBJS = @LTLIBOBJS@ -MAKEINFO = @MAKEINFO@ -MKDIR_P = @MKDIR_P@ -OBJEXT = @OBJEXT@ -PACKAGE = @PACKAGE@ -PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ -PACKAGE_NAME = @PACKAGE_NAME@ -PACKAGE_STRING = @PACKAGE_STRING@ -PACKAGE_TARNAME = @PACKAGE_TARNAME@ -PACKAGE_URL = @PACKAGE_URL@ -PACKAGE_VERSION = @PACKAGE_VERSION@ -PATH_SEPARATOR = @PATH_SEPARATOR@ -SET_MAKE = @SET_MAKE@ -SHELL = @SHELL@ -STRIP = @STRIP@ -VERSION = @VERSION@ -_libcurl_config = @_libcurl_config@ -abs_builddir = @abs_builddir@ -abs_srcdir = @abs_srcdir@ -abs_top_builddir = @abs_top_builddir@ -abs_top_srcdir = @abs_top_srcdir@ -ac_ct_CC = @ac_ct_CC@ -am__include = @am__include@ -am__leading_dot = @am__leading_dot@ -am__quote = @am__quote@ -am__tar = @am__tar@ -am__untar = @am__untar@ -bindir = @bindir@ -build_alias = @build_alias@ -builddir = @builddir@ -datadir = @datadir@ -datarootdir = @datarootdir@ -docdir = @docdir@ -dvidir = @dvidir@ -exec_prefix = @exec_prefix@ -host_alias = @host_alias@ -htmldir = @htmldir@ -includedir = @includedir@ -infodir = @infodir@ -install_sh = @install_sh@ -libdir = @libdir@ -libexecdir = @libexecdir@ -localedir = @localedir@ -localstatedir = @localstatedir@ -mandir = @mandir@ -mkdir_p = @mkdir_p@ -oldincludedir = @oldincludedir@ -pdfdir = @pdfdir@ -prefix = @prefix@ -program_transform_name = @program_transform_name@ -psdir = @psdir@ -runstatedir = @runstatedir@ -sbindir = @sbindir@ -sharedstatedir = @sharedstatedir@ -srcdir = @srcdir@ -sysconfdir = @sysconfdir@ -target_alias = @target_alias@ -top_build_prefix = @top_build_prefix@ -top_builddir = @top_builddir@ -top_srcdir = @top_srcdir@ -consumer_verification_SOURCES = main.c -AM_CPPFLAGS = ${LIBCURL_CPPFLAGS} -I../../../rust/pact_ffi/include -consumer_verification_LDADD = ${LIBCURL} -ldl -all: all-am - -.SUFFIXES: -.SUFFIXES: .c .o .obj -$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) - @for dep in $?; do \ - case '$(am__configure_deps)' in \ - *$$dep*) \ - ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ - && { if test -f $@; then exit 0; else break; fi; }; \ - exit 1;; \ - esac; \ - done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/Makefile'; \ - $(am__cd) $(top_srcdir) && \ - $(AUTOMAKE) --foreign src/Makefile -Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status - @case '$?' 
in \ - *config.status*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ - *) \ - echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \ - cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \ - esac; - -$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh - -$(top_srcdir)/configure: $(am__configure_deps) - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh -$(ACLOCAL_M4): $(am__aclocal_m4_deps) - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh -$(am__aclocal_m4_deps): -install-binPROGRAMS: $(bin_PROGRAMS) - @$(NORMAL_INSTALL) - @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ - if test -n "$$list"; then \ - echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \ - $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \ - fi; \ - for p in $$list; do echo "$$p $$p"; done | \ - sed 's/$(EXEEXT)$$//' | \ - while read p p1; do if test -f $$p \ - ; then echo "$$p"; echo "$$p"; else :; fi; \ - done | \ - sed -e 'p;s,.*/,,;n;h' \ - -e 's|.*|.|' \ - -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ - sed 'N;N;N;s,\n, ,g' | \ - $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ - { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ - if ($$2 == $$4) files[d] = files[d] " " $$1; \ - else { print "f", $$3 "/" $$4, $$1; } } \ - END { for (d in files) print "f", d, files[d] }' | \ - while read type dir files; do \ - if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ - test -z "$$files" || { \ - echo " $(INSTALL_PROGRAM_ENV) $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \ - $(INSTALL_PROGRAM_ENV) $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ - } \ - ; done - -uninstall-binPROGRAMS: - @$(NORMAL_UNINSTALL) - @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ - files=`for p in $$list; do echo "$$p"; done | \ - sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ - -e 's/$$/$(EXEEXT)/' \ - `; \ - test -n "$$list" || exit 0; \ - echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ - cd "$(DESTDIR)$(bindir)" && rm -f $$files - -clean-binPROGRAMS: - -test -z "$(bin_PROGRAMS)" || rm -f $(bin_PROGRAMS) - -consumer-verification$(EXEEXT): $(consumer_verification_OBJECTS) $(consumer_verification_DEPENDENCIES) $(EXTRA_consumer_verification_DEPENDENCIES) - @rm -f consumer-verification$(EXEEXT) - $(AM_V_CCLD)$(LINK) $(consumer_verification_OBJECTS) $(consumer_verification_LDADD) $(LIBS) - -mostlyclean-compile: - -rm -f *.$(OBJEXT) - -distclean-compile: - -rm -f *.tab.c - -@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/main.Po@am__quote@ # am--include-marker - -$(am__depfiles_remade): - @$(MKDIR_P) $(@D) - @echo '# dummy' >$@-t && $(am__mv) $@-t $@ - -am--depfiles: $(am__depfiles_remade) - -.c.o: -@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< -@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po -@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ -@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ -@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ $< - -.c.obj: -@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` -@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po -@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' 
object='$@' libtool=no @AMDEPBACKSLASH@ -@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ -@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ `$(CYGPATH_W) '$<'` - -ID: $(am__tagged_files) - $(am__define_uniq_tagged_files); mkid -fID $$unique -tags: tags-am -TAGS: tags - -tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) - set x; \ - here=`pwd`; \ - $(am__define_uniq_tagged_files); \ - shift; \ - if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ - test -n "$$unique" || unique=$$empty_fix; \ - if test $$# -gt 0; then \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - "$$@" $$unique; \ - else \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - $$unique; \ - fi; \ - fi -ctags: ctags-am - -CTAGS: ctags -ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) - $(am__define_uniq_tagged_files); \ - test -z "$(CTAGS_ARGS)$$unique" \ - || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ - $$unique - -GTAGS: - here=`$(am__cd) $(top_builddir) && pwd` \ - && $(am__cd) $(top_srcdir) \ - && gtags -i $(GTAGS_ARGS) "$$here" -cscopelist: cscopelist-am - -cscopelist-am: $(am__tagged_files) - list='$(am__tagged_files)'; \ - case "$(srcdir)" in \ - [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ - *) sdir=$(subdir)/$(srcdir) ;; \ - esac; \ - for i in $$list; do \ - if test -f "$$i"; then \ - echo "$(subdir)/$$i"; \ - else \ - echo "$$sdir/$$i"; \ - fi; \ - done >> $(top_builddir)/cscope.files - -distclean-tags: - -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags - -distdir: $(BUILT_SOURCES) - $(MAKE) $(AM_MAKEFLAGS) distdir-am - -distdir-am: $(DISTFILES) - @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - list='$(DISTFILES)'; \ - dist_files=`for file in $$list; do echo $$file; done | \ - sed -e "s|^$$srcdirstrip/||;t" \ - -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ - case $$dist_files in \ - */*) $(MKDIR_P) `echo "$$dist_files" | \ - sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ - sort -u` ;; \ - esac; \ - for file in $$dist_files; do \ - if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ - if test -d $$d/$$file; then \ - dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ - if test -d "$(distdir)/$$file"; then \ - find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ - fi; \ - if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ - find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ - fi; \ - cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ - else \ - test -f "$(distdir)/$$file" \ - || cp -p $$d/$$file "$(distdir)/$$file" \ - || exit 1; \ - fi; \ - done -check-am: all-am -check: check-am -all-am: Makefile $(PROGRAMS) -installdirs: - for dir in "$(DESTDIR)$(bindir)"; do \ - test -z "$$dir" || $(MKDIR_P) "$$dir"; \ - done -install: install-am -install-exec: install-exec-am -install-data: install-data-am -uninstall: uninstall-am - -install-am: all-am - @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am - -installcheck: installcheck-am -install-strip: - if test -z '$(STRIP)'; then \ - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - install; \ - else \ - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ - fi -mostlyclean-generic: - -clean-generic: - -distclean-generic: - -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) - -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) - -maintainer-clean-generic: - @echo "This command is intended for maintainers to use" - @echo "it deletes files that may require special tools to rebuild." -clean: clean-am - -clean-am: clean-binPROGRAMS clean-generic mostlyclean-am - -distclean: distclean-am - -rm -f ./$(DEPDIR)/main.Po - -rm -f Makefile -distclean-am: clean-am distclean-compile distclean-generic \ - distclean-tags - -dvi: dvi-am - -dvi-am: - -html: html-am - -html-am: - -info: info-am - -info-am: - -install-data-am: - -install-dvi: install-dvi-am - -install-dvi-am: - -install-exec-am: install-binPROGRAMS - -install-html: install-html-am - -install-html-am: - -install-info: install-info-am - -install-info-am: - -install-man: - -install-pdf: install-pdf-am - -install-pdf-am: - -install-ps: install-ps-am - -install-ps-am: - -installcheck-am: - -maintainer-clean: maintainer-clean-am - -rm -f ./$(DEPDIR)/main.Po - -rm -f Makefile -maintainer-clean-am: distclean-am maintainer-clean-generic - -mostlyclean: mostlyclean-am - -mostlyclean-am: mostlyclean-compile mostlyclean-generic - -pdf: pdf-am - -pdf-am: - -ps: ps-am - -ps-am: - -uninstall-am: uninstall-binPROGRAMS - -.MAKE: install-am install-strip - -.PHONY: CTAGS GTAGS TAGS all all-am am--depfiles check check-am clean \ - clean-binPROGRAMS clean-generic cscopelist-am ctags ctags-am \ - distclean distclean-compile distclean-generic distclean-tags \ - distdir dvi dvi-am html html-am info info-am install \ - install-am install-binPROGRAMS install-data install-data-am \ - install-dvi install-dvi-am install-exec install-exec-am \ - install-html install-html-am install-info install-info-am \ - install-man install-pdf install-pdf-am install-ps \ - install-ps-am install-strip installcheck installcheck-am \ - installdirs maintainer-clean maintainer-clean-generic \ - mostlyclean mostlyclean-compile mostlyclean-generic pdf pdf-am \ - ps ps-am tags tags-am uninstall uninstall-am \ - uninstall-binPROGRAMS - -.PRECIOUS: Makefile - - -# Tell versions [3.59,3.63) of GNU make to not export all variables. -# Otherwise a system limit (for SysV at least) may be exceeded. 
-.NOEXPORT: diff --git a/c/consumer-verification/src/main.c b/c/consumer-verification/src/main.c deleted file mode 100644 index 74062f36d..000000000 --- a/c/consumer-verification/src/main.c +++ /dev/null @@ -1,265 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include - -char *append_filename(char *executable, char *filename) { - int executable_len = strlen(executable); - int filename_len = strlen(filename); - int package_len = strlen(PACKAGE_NAME); - int diff = executable_len - package_len; - char *string = malloc(diff + filename_len + 1); - memcpy(string, executable, diff); - memcpy(string + diff, filename, filename_len); - string[diff + filename_len] = 0; - return string; -} - -char *slurp_file(char *filename) { - FILE *fp = fopen(filename, "rb"); - if (fp) { - fseek(fp, 0, SEEK_END); - long fsize = ftell(fp); - fseek(fp, 0, SEEK_SET); - char *string = malloc(fsize + 1); - int read = fread(string, fsize, 1, fp); - string[fsize] = 0; - fclose(fp); - return string; - } else { - printf("Failed to read %s\n", filename); - return 0; - } -} - -/* - Definitions of the exported functions from the pact mock server library -*/ -typedef int32_t (*lib_create_mock_server)(const char *, const char*); -typedef int32_t (*lib_mock_server_matched)(int32_t); -typedef int32_t (*lib_cleanup_mock_server)(int32_t); -typedef char* (*lib_mock_server_mismatches)(int32_t); - -typedef int (*lib_log_to_buffer)(enum LevelFilter level_filter); -typedef int (*lib_log_to_stdout)(enum LevelFilter level_filter); -typedef const char * (*lib_fetch_log_buffer)(const char *); -typedef const char * (*lib_mock_server_logs)(int32_t); -typedef void (*lib_string_delete)(const char *string); - -lib_create_mock_server create_mock_server; -lib_mock_server_matched mock_server_matched; -lib_cleanup_mock_server cleanup_mock_server; -lib_mock_server_mismatches mock_server_mismatches; - -lib_log_to_buffer mock_server__log_to_buffer; -lib_log_to_stdout mock_server__log_to_stdout; -lib_fetch_log_buffer mock_server__fetch_log_buffer; -lib_mock_server_logs mock_server__mock_server_logs; -lib_string_delete mock_server__string_delete; - -/* Loads the mock server shared library and sets up the functions we need to call */ -int setup_mock_server_functions(char *mock_server_lib) { - /* Get a handle to the pact mock server library*/ - void *handle = dlopen(mock_server_lib, RTLD_NOW | RTLD_GLOBAL); - if (handle) { - /* We have a handle, so lookup the functions we need */ - create_mock_server = dlsym(handle, "pactffi_create_mock_server"); - mock_server_matched = dlsym(handle, "pactffi_mock_server_matched"); - cleanup_mock_server = dlsym(handle, "pactffi_cleanup_mock_server"); - mock_server_mismatches = dlsym(handle, "pactffi_mock_server_mismatches"); - mock_server__log_to_buffer = dlsym(handle, "pactffi_log_to_buffer"); - mock_server__log_to_stdout = dlsym(handle, "pactffi_log_to_stdout"); - mock_server__fetch_log_buffer = dlsym(handle, "pactffi_fetch_log_buffer"); - mock_server__mock_server_logs = dlsym(handle, "pactffi_mock_server_logs"); - mock_server__string_delete = dlsym(handle, "pactffi_string_delete"); - return create_mock_server != 0 && mock_server_matched != 0 && cleanup_mock_server != 0 && - mock_server_mismatches != 0; - } else { - printf("Failed to open shared library %s\n", dlerror()); - return 0; - } -} - -/* Execute the basic test against the provider server */ -void execute_basic_test(int port) { - CURL *curl = curl_easy_init(); - if (curl) { - char url[64]; - sprintf(url, 
"http://localhost:%d/mallory?name=ron&status=good", port); - printf("Executing request against %s\n", url); - curl_easy_setopt(curl, CURLOPT_URL, url); - curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L); - CURLcode res = curl_easy_perform(curl); - if (res != CURLE_OK) { - puts("\nRequest failed"); - } - puts("\n"); - curl_easy_cleanup(curl); - } else { - puts("Could not initialise the curl library."); - } -} - -/* - Run a basic test sing the simple_pact.json file -*/ -void basic_test(char *executable) { - /* Load the pact file into memory */ - char *pactfile = append_filename(executable, "simple_pact.json"); - char *pact = slurp_file(pactfile); - if (pact) { - /* Create the mock server from the pact file. The mock server port will be returned */ - int port = create_mock_server(pact, "127.0.0.1:0"); - if (port > 0) { - printf("Mock server started on port %d\n", port); - - /* Now we execute out test against the mock server */ - execute_basic_test(port); - - /* Check the result */ - if (mock_server_matched(port)) { - puts("OK: Mock server verified all requests, as expected"); - } else { - puts("FAILED: Mock server did not match all requests!!"); - } - - puts("--------------- MOCK SERVER LOGS ---------------"); - const char* logs = mock_server__mock_server_logs(port); - puts(logs); - puts("------------------------------------------------"); - - /* Lastly, we need to shutdown and cleanup the mock server */ - cleanup_mock_server(port); - - free(pact); - } else { - printf("Failed to start mock_server %d\n", port); - } - } else { - printf("Failed to read %s\n", pactfile); - } - free(pactfile); -} - -char *error_data = "{\"complete\": {\"body\":123457}, \"body\": [1,2,3]}\n"; - -size_t error_test_read_callback(char *buffer, size_t size, size_t nitems, void *instream) { - strcpy(buffer, error_data); - return strlen(error_data); -} - -/* Execute the error test against the provider server, where we expect validations to fail */ -void execute_error_test(int port) { - CURL *curl = curl_easy_init(); - if (curl) { - char url[32]; - sprintf(url, "http://localhost:%d/?test=hi", port); - printf("Executing request against %s\n", url); - curl_easy_setopt(curl, CURLOPT_URL, url); - curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L); - curl_easy_setopt(curl, CURLOPT_UPLOAD, 1L); - curl_easy_setopt(curl, CURLOPT_INFILESIZE, strlen(error_data)); - - struct curl_slist *list = NULL; - list = curl_slist_append(list, "Content-Type: application/json"); - list = curl_slist_append(list, "Expect:"); - curl_easy_setopt(curl, CURLOPT_HTTPHEADER, list); - - curl_easy_setopt(curl, CURLOPT_READFUNCTION, error_test_read_callback); - - CURLcode res = curl_easy_perform(curl); - if (res != CURLE_OK) { - printf("\nRequest failed: %d - %s\n", res, curl_easy_strerror(res)); - } - puts("\n"); - curl_easy_cleanup(curl); - } else { - puts("Could not initialise the curl library."); - } -} - -/* - Run a error test sing the test_pact_with_bodies.json file. This test is expected to have some verification errors. -*/ -void error_test(char *executable) { - /* Load the pact file into memory */ - char *pactfile = append_filename(executable, "test_pact_with_bodies.json"); - char *pact = slurp_file(pactfile); - if (pact) { - /* Create the mock server from the pact file. 
The mock server port will be returned */ - int port = create_mock_server(pact, "127.0.0.1:0"); - if (port > 0) { - printf("Mock server started on port %d\n", port); - - /* Now we execute out test against the mock server */ - execute_error_test(port); - - /* Check the result */ - if (mock_server_matched(port)) { - puts("FAILED: Mock server verified all requests!!"); - } else { - puts("OK: Mock server did not match all requests."); - char *mismatch_json = mock_server_mismatches(port); - puts(mismatch_json); - } - - puts("--------------- MOCK SERVER LOGS ---------------"); - const char* logs = mock_server__mock_server_logs(port); - puts(logs); - puts("------------------------------------------------"); - - /* Lastly, we need to shutdown and cleanup the mock server */ - cleanup_mock_server(port); - free(pact); - } else { - printf("Failed to start mock_server %d\n", port); - } - } else { - printf("Failed to read %s\n", pactfile); - } - free(pactfile); -} - -int main (int argc, char **argv) { - puts("This is " PACKAGE_STRING "."); - - if (argc < 3 || (strcmp(argv[1], "basic") != 0 && strcmp(argv[1], "error") != 0 && strcmp(argv[1], "both") != 0)) { - puts("You need to specify the test to run: basic, error or both and the path to the pact FFI DLL"); - return 1; - } - - if (!setup_mock_server_functions(argv[2])) { - puts("Failed to setup the mock server library functions, exiting.\n"); - return 1; - } - - int result = mock_server__log_to_buffer(LevelFilter_Debug); - printf("Setup logging result: %d\n", result); - - curl_global_init(CURL_GLOBAL_ALL); - - if (strcmp(argv[1], "basic") == 0) { - puts("Running basic pact test"); - basic_test(argv[0]); - } else if (strcmp(argv[1], "error") == 0) { - puts("Running error pact test"); - error_test(argv[0]); - } else if (strcmp(argv[1], "both") == 0) { - puts("Running both tests test"); - basic_test(argv[0]); - error_test(argv[0]); - } else { - puts("Hmm, I'm sure I validated all the inputs, so how did you get here?"); - } - - puts("------------------ GLOBAL LOGS ------------------"); - const char* logs = mock_server__fetch_log_buffer(NULL); - puts(logs); - mock_server__string_delete(logs); - puts("------------------------------------------------"); - - return 0; -} diff --git a/c/consumer-verification/src/simple_pact.json b/c/consumer-verification/src/simple_pact.json deleted file mode 100644 index 3469d23da..000000000 --- a/c/consumer-verification/src/simple_pact.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "provider": { - "name": "Alice Service" - }, - "consumer": { - "name": "Consumer" - }, - "interactions": [ - { - "description": "a retrieve Mallory request", - "request": { - "method": "GET", - "path": "/mallory", - "query": "name=ron&status=good" - }, - "response": { - "status": 200, - "headers": { - "Content-Type": "text/html" - }, - "body": "\"That is some good Mallory.\"" - } - } - ] -} diff --git a/c/consumer-verification/src/test_pact_with_bodies.json b/c/consumer-verification/src/test_pact_with_bodies.json deleted file mode 100644 index 019fbc1b9..000000000 --- a/c/consumer-verification/src/test_pact_with_bodies.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "provider": { - "name": "test_provider" - }, - "consumer": { - "name": "test_consumer" - }, - "interactions": [ - { - "providerState": "test state", - "description": "test interaction", - "request": { - "method": "PUT", - "path": "/", - "body": { - "complete": { - "certificateUri": "http://...", - "issues": { - "idNotFound": {} - }, - "nevdis": { - "body": null, - "colour": null, - "engine": null - }, 
- "body": 123456 - }, - "body": [ - 1, - 2, - 3 - ] - } - }, - "response": { - "status": 200 - } - } - ], - "metadata": { - "pact-specification": { - "version": "2.0.0" - }, - "pact-jvm": { - "version": "" - } - } -} diff --git a/c/provider-verifcation/CMakeLists.txt b/c/provider-verifcation/CMakeLists.txt deleted file mode 100644 index 6684f6ed1..000000000 --- a/c/provider-verifcation/CMakeLists.txt +++ /dev/null @@ -1,12 +0,0 @@ -cmake_minimum_required(VERSION 3.10) - -project(c-verify VERSION 1.0 LANGUAGES C) - -get_filename_component(REAL_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/../.." ABSOLUTE) -set(SEARCH_PATH "${REAL_ROOT}/rust/pact_ffi/build/install/lib/cmake") - -find_package(PactFfi REQUIRED CONFIG PATHS ${SEARCH_PATH}) - -add_executable(c-verify src/main.c) - -target_link_libraries(c-verify PRIVATE PactFfi pthread dl m) diff --git a/c/provider-verifcation/src/main.c b/c/provider-verifcation/src/main.c deleted file mode 100644 index 788d73822..000000000 --- a/c/provider-verifcation/src/main.c +++ /dev/null @@ -1,25 +0,0 @@ -#include -#include -#include -#include - -int main (int argc, char **argv) { - pactffi_log_to_buffer(LevelFilter_Trace); - - VerifierHandle *handle = pactffi_verifier_new(); - pactffi_verifier_set_provider_info(handle, "c-provider", NULL, NULL, 0, NULL); - pactffi_verifier_add_file_source(handle, "pact.json"); - - int result = pactffi_verifier_execute(handle); - - puts("--------------- LOGS ---------------"); - const char *logs = pactffi_verifier_logs(handle); - printf("Got logs == %p\n", (void *) logs); - printf("logs: %s\n", logs); - puts("------------------------------------"); - - pactffi_free_string(logs); - pactffi_verifier_shutdown(handle); - - return result; -} diff --git a/compatibility-suite/Cargo.lock b/compatibility-suite/Cargo.lock deleted file mode 100644 index 3c315939a..000000000 --- a/compatibility-suite/Cargo.lock +++ /dev/null @@ -1,4260 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "addr2line" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" -dependencies = [ - "gimli", -] - -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - -[[package]] -name = "aes" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" -dependencies = [ - "cfg-if", - "cipher", - "cpufeatures", -] - -[[package]] -name = "aho-corasick" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" -dependencies = [ - "memchr", -] - -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "ansi_term" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" -dependencies = [ - "winapi", -] - -[[package]] -name = "anstream" -version = "0.6.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" -dependencies = [ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc" - -[[package]] -name = "anstyle-parse" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" -dependencies = [ - "windows-sys 0.52.0", -] - -[[package]] -name = "anstyle-wincon" -version = "3.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" -dependencies = [ - "anstyle", - "windows-sys 0.52.0", -] - -[[package]] -name = "anyhow" -version = "1.0.82" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519" - -[[package]] -name = "ariadne" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72fe02fc62033df9ba41cba57ee19acf5e742511a140c7dbc3a873e19a19a1bd" -dependencies = [ - "unicode-width", - "yansi 0.5.1", -] - -[[package]] -name = "async-compression" -version = "0.4.8" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "07dbbf24db18d609b1462965249abdf49129ccad073ec257da372adc83259c60" -dependencies = [ - "flate2", - "futures-core", - "memchr", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "async-stream" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" -dependencies = [ - "async-stream-impl", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "async-trait" -version = "0.1.80" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "atomic" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba" - -[[package]] -name = "atomic" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d818003e740b63afc82337e3160717f4f63078720a810b7b903e70a5d1d2994" -dependencies = [ - "bytemuck", -] - -[[package]] -name = "autocfg" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" - -[[package]] -name = "axum" -version = "0.6.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" -dependencies = [ - "async-trait", - "axum-core", - "bitflags 1.3.2", - "bytes", - "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.28", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "sync_wrapper", - "tower", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum-core" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" -dependencies = [ - "async-trait", - "bytes", - "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "mime", - "rustversion", - "tower-layer", - "tower-service", -] - -[[package]] -name = "backtrace" -version = "0.3.71" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" -dependencies = [ - "addr2line", - "cc", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", -] - -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - -[[package]] -name = "base64" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" - -[[package]] -name = "base64ct" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" - -[[package]] -name = "beef" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" - -[[package]] -name = "binascii" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "383d29d513d8764dcdc42ea295d979eb99c3c9f00607b3692cf68a431f7dca72" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitflags" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = "bstr" -version = "1.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706" -dependencies = [ - "memchr", - "serde", -] - -[[package]] -name = "bumpalo" -version = "3.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" - -[[package]] -name = "bytecount" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" - -[[package]] -name = "bytemuck" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15" - -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - -[[package]] -name = "bytes" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" -dependencies = [ - "serde", -] - -[[package]] -name = "bzip2" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdb116a6ef3f6c3698828873ad02c3014b3c85cadb88496095628e3ef1e347f8" -dependencies = [ - "bzip2-sys", - "libc", -] - -[[package]] -name = "bzip2-sys" -version = "0.1.11+1.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" -dependencies = [ - "cc", - "libc", - "pkg-config", -] - -[[package]] -name = "cc" -version = "1.0.95" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d32a725bc159af97c3e629873bb9f88fb8cf8a4867175f76dc987815ea07c83b" -dependencies = [ - "jobserver", - "libc", - "once_cell", -] - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "chrono" -version = "0.4.38" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" -dependencies = [ - "android-tzdata", - "iana-time-zone", - "num-traits", - "serde", - "windows-targets 0.52.5", -] - -[[package]] -name = "chrono-tz" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d59ae0466b83e838b81a54256c39d5d7c20b9d7daa10510a242d9b75abd5936e" -dependencies = [ - "chrono", - "chrono-tz-build", - "phf", -] - -[[package]] -name = "chrono-tz-build" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "433e39f13c9a060046954e0592a8d0a4bcb1040125cbf91cb8ee58964cfb350f" -dependencies = [ - "parse-zoneinfo", - "phf", - "phf_codegen", -] - -[[package]] -name = "cipher" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" -dependencies = [ - "crypto-common", - "inout", -] - -[[package]] -name = "clap" -version = "4.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" -dependencies = [ - "clap_builder", - "clap_derive", -] - -[[package]] -name = "clap_builder" -version = "4.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" -dependencies = [ - "anstream", - "anstyle", - "clap_lex", - "strsim 0.11.1", - "terminal_size", -] - -[[package]] -name = "clap_derive" -version = "4.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" -dependencies = [ - "heck 0.5.0", - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "clap_lex" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" - -[[package]] -name = "colorchoice" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" - -[[package]] -name = "compatibility-suite" -version = "0.0.0" -dependencies = [ - "anyhow", - "async-trait", - "base64 0.22.0", - "bytes", - "cucumber", - "futures", - "itertools 0.12.1", - "lazy_static", - "maplit", - "pact_consumer", - "pact_matching", - "pact_mock_server", - "pact_models", - "pact_verifier", - "pretty_assertions", - "regex", - "reqwest 0.12.4", - "rocket", - "serde", - "serde_json", - "sxd-document", - "tokio", - "tracing", - "tracing-subscriber", - "uuid", -] - -[[package]] -name = "console" -version = "0.15.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" -dependencies = [ - "encode_unicode", - "lazy_static", - "libc", - "unicode-width", - "windows-sys 0.52.0", -] - -[[package]] -name = "constant_time_eq" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" - -[[package]] -name = "cookie" -version = "0.18.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" -dependencies = [ - "percent-encoding", - "time", - "version_check", -] - -[[package]] -name = "core-foundation" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" - -[[package]] -name = "cpufeatures" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" -dependencies = [ - "libc", -] - -[[package]] -name = "crc32fast" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "cucumber" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5063d8cf24f4998ad01cac265da468a15ca682a8f4f826d50e661964e8d9b8" -dependencies = [ - "anyhow", - "async-trait", - "clap", - "console", - "cucumber-codegen", - "cucumber-expressions", - "derive_more", - "drain_filter_polyfill", - "either", - "futures", - "gherkin", - "globwalk", - "humantime", - "inventory", - "itertools 0.12.1", - "lazy-regex", - "linked-hash-map", - "once_cell", - "pin-project", - "regex", - "sealed", - "smart-default", -] - -[[package]] -name = "cucumber-codegen" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01091e28d1f566c8b31b67948399d2efd6c0a8f6228a9785519ed7b73f7f0aef" -dependencies = [ - "cucumber-expressions", - "inflections", - "itertools 0.12.1", - "proc-macro2", - "quote", - "regex", - "syn 2.0.60", - "synthez", -] - -[[package]] -name = "cucumber-expressions" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d794fed319eea24246fb5f57632f7ae38d61195817b7eb659455aa5bdd7c1810" -dependencies = [ - "derive_more", - "either", - "nom", - "nom_locate", - "regex", - "regex-syntax 0.7.5", -] - -[[package]] -name = "darling" -version = "0.20.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391" -dependencies = [ - "darling_core", - "darling_macro", -] - -[[package]] -name = "darling_core" -version = "0.20.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim 0.10.0", - "syn 2.0.60", -] - -[[package]] -name = "darling_macro" -version = "0.20.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" -dependencies = [ - "darling_core", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "deranged" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" -dependencies = [ - "powerfmt", - "serde", -] - -[[package]] -name = "derive_more" -version = "0.99.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "devise" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6eacefd3f541c66fc61433d65e54e0e46e0a029a819a7dbbc7a7b489e8a85f8" -dependencies = [ - "devise_codegen", - "devise_core", -] - -[[package]] -name = "devise_codegen" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8cf4b8dd484ede80fd5c547592c46c3745a617c8af278e2b72bea86b2dfed6" -dependencies = [ - "devise_core", - "quote", -] - -[[package]] -name = "devise_core" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35b50dba0afdca80b187392b24f2499a88c336d5a8493e4b4ccfb608708be56a" -dependencies = [ - "bitflags 2.5.0", - "proc-macro2", - "proc-macro2-diagnostics", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "diff" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" - -[[package]] -name = "difference" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "crypto-common", - "subtle", -] - -[[package]] -name = "drain_filter_polyfill" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "669a445ee724c5c69b1b06fe0b63e70a1c84bc9bb7d9696cd4f4e3ec45050408" - -[[package]] -name = "either" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2" - -[[package]] -name = "encode_unicode" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" - -[[package]] -name = "encoding_rs" -version = "0.8.34" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "equivalent" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" - -[[package]] -name = "errno" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "fastrand" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984" - -[[package]] -name = "figment" -version = "0.10.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d032832d74006f99547004d49410a4b4218e4c33382d56ca3ff89df74f86b953" -dependencies = [ - "atomic 0.6.0", - "pear", - "serde", - "toml", - "uncased", - "version_check", -] - -[[package]] -name = "filetime" -version = "0.2.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "windows-sys 0.52.0", -] - -[[package]] -name = "fixedbitset" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" - -[[package]] -name = "flate2" -version = "1.0.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" -dependencies = [ - "crc32fast", - "miniz_oxide", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "form_urlencoded" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "fs2" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "futures" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" - -[[package]] -name = "futures-executor" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-io" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" - -[[package]] -name = "futures-macro" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "futures-sink" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" - -[[package]] -name = "futures-task" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" - -[[package]] -name = "futures-util" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "slab", -] - -[[package]] -name = "fxhash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] - -[[package]] -name = "generator" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc16584ff22b460a382b7feec54b23d2908d858152e5739a120b949293bd74e" -dependencies = [ - "cc", - "libc", - "log", - "rustversion", - "windows 0.48.0", -] - -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] -name = "gherkin" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20b79820c0df536d1f3a089a2fa958f61cb96ce9e0f3f8f507f5a31179567755" -dependencies = [ - "heck 0.4.1", - "peg", - "quote", - "serde", - "serde_json", - "syn 2.0.60", - "textwrap", - "thiserror", - "typed-builder", -] - -[[package]] -name = "gimli" -version = "0.28.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" - -[[package]] -name = "glob" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" - -[[package]] -name = "globset" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" -dependencies = [ - "aho-corasick", - "bstr", - "log", - "regex-automata 
0.4.6", - "regex-syntax 0.8.3", -] - -[[package]] -name = "globwalk" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93e3af942408868f6934a7b85134a3230832b9977cf66125df2f9edcfce4ddcc" -dependencies = [ - "bitflags 1.3.2", - "ignore", - "walkdir", -] - -[[package]] -name = "gregorian" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18888aec42cda8438d991d59b05e5ffa1a9799b1df634346672b1fac7eb02354" -dependencies = [ - "libc", -] - -[[package]] -name = "h2" -version = "0.3.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http 0.2.12", - "indexmap 2.2.6", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" -version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" - -[[package]] -name = "hashers" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2bca93b15ea5a746f220e56587f71e73c6165eab783df9e26590069953e3c30" -dependencies = [ - "fxhash", -] - -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - -[[package]] -name = "hermit-abi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest", -] - -[[package]] -name = "home" -version = "0.5.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" -dependencies = [ - "windows-sys 0.52.0", -] - -[[package]] -name = "http" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - -[[package]] -name = "http" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - -[[package]] -name = "http-body" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" -dependencies = [ - "bytes", - "http 0.2.12", - "pin-project-lite", -] - -[[package]] -name = "http-body" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" -dependencies = [ - "bytes", - "http 1.1.0", -] - -[[package]] -name = "http-body-util" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" -dependencies = [ - "bytes", - "futures-core", - "http 1.1.0", - "http-body 1.0.0", - "pin-project-lite", -] - -[[package]] -name = "httparse" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" - -[[package]] -name = "httpdate" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" - -[[package]] -name = "humantime" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" - -[[package]] -name = "hyper" -version = "0.14.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" -dependencies = [ - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "h2", - "http 0.2.12", - "http-body 0.4.6", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "socket2", - "tokio", - "tower-service", - "tracing", - "want", -] - -[[package]] -name = "hyper" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" -dependencies = [ - "bytes", - "futures-channel", - "futures-util", - "http 1.1.0", - "http-body 1.0.0", - "httparse", - "itoa", - "pin-project-lite", - "smallvec", - "tokio", - "want", -] - -[[package]] -name = "hyper-rustls" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" -dependencies = [ - "futures-util", - "http 0.2.12", - "hyper 0.14.28", - "log", - "rustls 0.21.11", - "rustls-native-certs 0.6.3", - "tokio", - "tokio-rustls 0.24.1", -] - -[[package]] -name = "hyper-rustls" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c" -dependencies = [ - "futures-util", - "http 1.1.0", - "hyper 1.3.1", - "hyper-util", - "rustls 0.22.4", - "rustls-pki-types", - "tokio", - "tokio-rustls 0.25.0", - "tower-service", -] - -[[package]] -name = "hyper-timeout" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" -dependencies = [ - "hyper 0.14.28", - "pin-project-lite", - "tokio", - "tokio-io-timeout", -] - -[[package]] -name = "hyper-util" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" -dependencies = [ - "bytes", - "futures-channel", - "futures-util", - 
"http 1.1.0", - "http-body 1.0.0", - "hyper 1.3.1", - "pin-project-lite", - "socket2", - "tokio", - "tower", - "tower-service", - "tracing", -] - -[[package]] -name = "iana-time-zone" -version = "0.1.60" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "wasm-bindgen", - "windows-core", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - -[[package]] -name = "idna" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" -dependencies = [ - "unicode-bidi", - "unicode-normalization", -] - -[[package]] -name = "ignore" -version = "0.4.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" -dependencies = [ - "crossbeam-deque", - "globset", - "log", - "memchr", - "regex-automata 0.4.6", - "same-file", - "walkdir", - "winapi-util", -] - -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", - "serde", -] - -[[package]] -name = "indexmap" -version = "2.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" -dependencies = [ - "equivalent", - "hashbrown 0.14.3", - "serde", -] - -[[package]] -name = "indextree" -version = "4.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a6f7e29c1619ec492f411b021ac9f30649d5f522ca6f287f2467ee48c8dfe10" - -[[package]] -name = "indicatif" -version = "0.17.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" -dependencies = [ - "console", - "instant", - "number_prefix", - "portable-atomic", - "unicode-width", -] - -[[package]] -name = "inflections" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a257582fdcde896fd96463bf2d40eefea0580021c0712a0e2b028b60b47a837a" - -[[package]] -name = "inlinable_string" -version = "0.1.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb" - -[[package]] -name = "inout" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" -dependencies = [ - "generic-array", -] - -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - 
"cfg-if", -] - -[[package]] -name = "inventory" -version = "0.3.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f958d3d68f4167080a18141e10381e7634563984a537f2a49a30fd8e53ac5767" - -[[package]] -name = "ipnet" -version = "2.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" - -[[package]] -name = "is-terminal" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" -dependencies = [ - "hermit-abi", - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" - -[[package]] -name = "jobserver" -version = "0.1.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" -dependencies = [ - "libc", -] - -[[package]] -name = "js-sys" -version = "0.3.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "lazy-regex" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d12be4595afdf58bd19e4a9f4e24187da2a66700786ff660a418e9059937a4c" -dependencies = [ - "lazy-regex-proc_macros", - "once_cell", - "regex", -] - -[[package]] -name = "lazy-regex-proc_macros" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44bcd58e6c97a7fcbaffcdc95728b393b8d98933bfadad49ed4097845b57ef0b" -dependencies = [ - "proc-macro2", - "quote", - "regex", - "syn 2.0.60", -] - -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "lenient_semver" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de8de3f4f3754c280ce1c8c42ed8dd26a9c8385c2e5ad4ec5a77e774cea9c1ec" -dependencies = [ - "lenient_semver_parser", - "semver", -] - -[[package]] -name = "lenient_semver_parser" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f650c1d024ddc26b4bb79c3076b30030f2cf2b18292af698c81f7337a64d7d6" -dependencies = [ - "lenient_semver_version_builder", - "semver", -] - -[[package]] -name = "lenient_semver_version_builder" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9049f8ff49f75b946f95557148e70230499c8a642bf2d6528246afc7d0282d17" -dependencies = [ - "semver", -] - -[[package]] -name = "libc" -version = "0.2.153" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" - -[[package]] -name = "linked-hash-map" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" - -[[package]] -name = "linux-raw-sys" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" - -[[package]] -name = "lock_api" -version = "0.4.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" -dependencies = [ - "autocfg", - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" - -[[package]] -name = "logos" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c000ca4d908ff18ac99b93a062cb8958d331c3220719c52e77cb19cc6ac5d2c1" -dependencies = [ - "logos-derive", -] - -[[package]] -name = "logos-codegen" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc487311295e0002e452025d6b580b77bb17286de87b57138f3b5db711cded68" -dependencies = [ - "beef", - "fnv", - "proc-macro2", - "quote", - "regex-syntax 0.6.29", - "syn 2.0.60", -] - -[[package]] -name = "logos-derive" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbfc0d229f1f42d790440136d941afd806bc9e949e2bcb8faa813b0f00d1267e" -dependencies = [ - "logos-codegen", -] - -[[package]] -name = "loom" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff50ecb28bb86013e935fb6683ab1f6d3a20016f123c76fd4c27470076ac30f5" -dependencies = [ - "cfg-if", - "generator", - "scoped-tls", - "serde", - "serde_json", - "tracing", - "tracing-subscriber", -] - -[[package]] -name = "maplit" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" - -[[package]] -name = "matchers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" -dependencies = [ - "regex-automata 0.1.10", -] - -[[package]] -name = "matchit" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" - -[[package]] -name = "md5" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" - -[[package]] -name = "memchr" -version = "2.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" - -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - -[[package]] -name = "minimal-lexical" -version = "0.2.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" - -[[package]] -name = "miniz_oxide" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" -dependencies = [ - "adler", -] - -[[package]] -name = "mio" -version = "0.8.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" -dependencies = [ - "libc", - "wasi", - "windows-sys 0.48.0", -] - -[[package]] -name = "multer" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01acbdc23469fd8fe07ab135923371d5f5a422fbf9c522158677c8eb15bc51c2" -dependencies = [ - "bytes", - "encoding_rs", - "futures-util", - "http 0.2.12", - "httparse", - "log", - "memchr", - "mime", - "spin", - "tokio", - "tokio-util", - "version_check", -] - -[[package]] -name = "multer" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15d522be0a9c3e46fd2632e272d178f56387bdb5c9fbb3a36c649062e9b5219" -dependencies = [ - "bytes", - "encoding_rs", - "futures-util", - "http 1.1.0", - "httparse", - "log", - "memchr", - "mime", - "serde", - "serde_json", - "spin", - "version_check", -] - -[[package]] -name = "multimap" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" - -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", -] - -[[package]] -name = "nom_locate" -version = "4.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e3c83c053b0713da60c5b8de47fe8e494fe3ece5267b2f23090a07a053ba8f3" -dependencies = [ - "bytecount", - "memchr", - "nom", -] - -[[package]] -name = "ntapi" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a3895c6391c39d7fe7ebc444a87eb2991b2a0bc718fdabd071eec617fc68e4" -dependencies = [ - "winapi", -] - -[[package]] -name = "nu-ansi-term" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" -dependencies = [ - "overload", - "winapi", -] - -[[package]] -name = "num-conv" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" - -[[package]] -name = "num-traits" -version = "0.2.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" -dependencies = [ - "autocfg", -] - -[[package]] -name = "num_cpus" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" -dependencies = [ - "hermit-abi", - "libc", -] - -[[package]] -name = "number_prefix" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" - -[[package]] -name = "object" -version = "0.32.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" -dependencies = [ - "memchr", -] - -[[package]] -name = "once_cell" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" - -[[package]] -name = "onig" -version = "6.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c4b31c8722ad9171c6d77d3557db078cab2bd50afcc9d09c8b315c59df8ca4f" -dependencies = [ - "bitflags 1.3.2", - "libc", - "once_cell", - "onig_sys", -] - -[[package]] -name = "onig_sys" -version = "69.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b829e3d7e9cc74c7e315ee8edb185bf4190da5acde74afd7fc59c35b1f086e7" -dependencies = [ - "cc", - "pkg-config", -] - -[[package]] -name = "openssl-probe" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" - -[[package]] -name = "os_info" -version = "3.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae99c7fa6dd38c7cafe1ec085e804f8f555a2f8659b0dbe03f1f9963a9b51092" -dependencies = [ - "log", - "serde", - "windows-sys 0.52.0", -] - -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - -[[package]] -name = "pact-plugin-driver" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dafb1371bf02e0fa25212061d41cc5cbc4ad1116d1e988a86eede5e5bb338931" -dependencies = [ - "anyhow", - "async-trait", - "backtrace", - "bytes", - "chrono", - "flate2", - "futures-util", - "home", - "indicatif", - "itertools 0.12.1", - "lazy_static", - "log", - "maplit", - "md5", - "os_info", - "pact_models", - "prost", - "prost-types", - "regex", - "reqwest 0.12.4", - "semver", - "serde", - "serde_json", - "sha2", - "sysinfo", - "tar", - "tokio", - "toml", - "tonic", - "tonic-build", - "tracing", - "tracing-core", - "uuid", - "zip", -] - -[[package]] -name = "pact_consumer" -version = "1.2.1" -dependencies = [ - "anyhow", - "async-trait", - "bytes", - "futures", - "itertools 0.12.1", - "lazy_static", - "maplit", - "pact-plugin-driver", - "pact_matching", - "pact_mock_server", - "pact_models", - "regex", - "serde_json", - "tokio", - "tracing", - "tracing-core", - "url", - "uuid", -] - -[[package]] -name = "pact_matching" -version = "1.2.2" -dependencies = [ - "ansi_term", - "anyhow", - "base64 0.22.0", - "bytes", - "chrono", - "difference", - "futures", - "hex", - "http 1.1.0", - "itertools 0.12.1", - "lazy_static", - "lenient_semver", - "maplit", - "md5", - "mime", - "multer 3.0.0", - "nom", - "onig", - "pact-plugin-driver", - "pact_models", - "rand", - "reqwest 0.12.4", - "semver", - "serde", - "serde_json", - "serde_urlencoded", - "sxd-document", - "tokio", - "tracing", - "tracing-core", - "tree_magic_mini", - "uuid", -] - -[[package]] -name = "pact_mock_server" -version = "1.2.7" -dependencies = [ - "anyhow", - "bytes", - "futures", - "hyper 0.14.28", - "hyper-rustls 0.24.2", - "itertools 0.12.1", - "lazy_static", - "maplit", - 
"pact-plugin-driver", - "pact_matching", - "pact_models", - "rustls 0.21.11", - "rustls-pemfile 1.0.4", - "serde", - "serde_json", - "thiserror", - "tokio", - "tokio-rustls 0.24.1", - "tracing", - "tracing-core", - "url", - "uuid", -] - -[[package]] -name = "pact_models" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf3bf784d5fc22d0332041fa8f9dd9992e0ce2b22236462847ca1b1377297e10" -dependencies = [ - "anyhow", - "ariadne", - "base64 0.21.7", - "bytes", - "chrono", - "chrono-tz", - "fs2", - "gregorian", - "hashers", - "hex", - "indextree", - "itertools 0.10.5", - "lazy_static", - "lenient_semver", - "logos", - "maplit", - "mime", - "nom", - "onig", - "parse-zoneinfo", - "rand", - "rand_regex", - "regex", - "regex-syntax 0.6.29", - "reqwest 0.11.27", - "semver", - "serde", - "serde_json", - "sxd-document", - "tracing", - "uuid", -] - -[[package]] -name = "pact_verifier" -version = "1.2.1" -dependencies = [ - "ansi_term", - "anyhow", - "async-trait", - "base64 0.22.0", - "bytes", - "difference", - "futures", - "http 1.1.0", - "humantime", - "itertools 0.12.1", - "lazy_static", - "maplit", - "mime", - "pact-plugin-driver", - "pact_matching", - "pact_models", - "regex", - "reqwest 0.12.4", - "serde", - "serde_json", - "serde_with", - "thiserror", - "tokio", - "tracing", - "tracing-core", - "url", - "urlencoding", -] - -[[package]] -name = "parking_lot" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-targets 0.48.5", -] - -[[package]] -name = "parse-zoneinfo" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c705f256449c60da65e11ff6626e0c16a0a0b96aaa348de61376b249bc340f41" -dependencies = [ - "regex", -] - -[[package]] -name = "password-hash" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" -dependencies = [ - "base64ct", - "rand_core", - "subtle", -] - -[[package]] -name = "pbkdf2" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" -dependencies = [ - "digest", - "hmac", - "password-hash", - "sha2", -] - -[[package]] -name = "pear" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdeeaa00ce488657faba8ebf44ab9361f9365a97bd39ffb8a60663f57ff4b467" -dependencies = [ - "inlinable_string", - "pear_codegen", - "yansi 1.0.1", -] - -[[package]] -name = "pear_codegen" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bab5b985dc082b345f812b7df84e1bef27e7207b39e448439ba8bd69c93f147" -dependencies = [ - "proc-macro2", - "proc-macro2-diagnostics", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "peg" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9f76678828272f177ac33b7e2ac2e3e73cc6c1cd1e3e387928aa69562fa51367" -dependencies = [ - "peg-macros", - "peg-runtime", -] - -[[package]] -name = "peg-macros" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "636d60acf97633e48d266d7415a9355d4389cea327a193f87df395d88cd2b14d" -dependencies = [ - "peg-runtime", - "proc-macro2", - "quote", -] - -[[package]] -name = "peg-runtime" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9555b1514d2d99d78150d3c799d4c357a3e2c2a8062cd108e93a06d9057629c5" - -[[package]] -name = "percent-encoding" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" - -[[package]] -name = "peresil" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f658886ed52e196e850cfbbfddab9eaa7f6d90dd0929e264c31e5cec07e09e57" - -[[package]] -name = "petgraph" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" -dependencies = [ - "fixedbitset", - "indexmap 2.2.6", -] - -[[package]] -name = "phf" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" -dependencies = [ - "phf_shared", -] - -[[package]] -name = "phf_codegen" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a" -dependencies = [ - "phf_generator", - "phf_shared", -] - -[[package]] -name = "phf_generator" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" -dependencies = [ - "phf_shared", - "rand", -] - -[[package]] -name = "phf_shared" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" -dependencies = [ - "siphasher", -] - -[[package]] -name = "pin-project" -version = "1.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "pkg-config" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" - -[[package]] -name = "portable-atomic" -version = "1.6.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" - -[[package]] -name = "powerfmt" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - -[[package]] -name = "ppv-lite86" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" - -[[package]] -name = "pretty_assertions" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" -dependencies = [ - "diff", - "yansi 0.5.1", -] - -[[package]] -name = "prettyplease" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ac2cf0f2e4f42b49f5ffd07dae8d746508ef7526c13940e5f524012ae6c6550" -dependencies = [ - "proc-macro2", - "syn 2.0.60", -] - -[[package]] -name = "proc-macro2" -version = "1.0.81" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "proc-macro2-diagnostics" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.60", - "version_check", - "yansi 1.0.1", -] - -[[package]] -name = "prost" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0f5d036824e4761737860779c906171497f6d55681139d8312388f8fe398922" -dependencies = [ - "bytes", - "prost-derive", -] - -[[package]] -name = "prost-build" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80b776a1b2dc779f5ee0641f8ade0125bc1298dd41a9a0c16d8bd57b42d222b1" -dependencies = [ - "bytes", - "heck 0.5.0", - "itertools 0.12.1", - "log", - "multimap", - "once_cell", - "petgraph", - "prettyplease", - "prost", - "prost-types", - "regex", - "syn 2.0.60", - "tempfile", -] - -[[package]] -name = "prost-derive" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19de2de2a00075bf566bee3bd4db014b11587e84184d3f7a791bc17f1a8e9e48" -dependencies = [ - "anyhow", - "itertools 0.12.1", - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "prost-types" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3235c33eb02c1f1e212abdbe34c78b264b038fb58ca612664343271e36e55ffe" -dependencies = [ - "prost", -] - -[[package]] -name = "quote" -version = "1.0.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha", - "rand_core", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom", -] - -[[package]] -name = "rand_regex" -version = "0.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b2a9fe2d7d9eeaf3279d1780452a5bbd26b31b27938787ef1c3e930d1e9cfbd" -dependencies = [ - "rand", - "regex-syntax 0.6.29", -] - -[[package]] -name = "rayon" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - -[[package]] -name = "redox_syscall" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" -dependencies = [ - "bitflags 1.3.2", -] - -[[package]] -name = "ref-cast" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4846d4c50d1721b1a3bef8af76924eef20d5e723647333798c1b519b3a9473f" -dependencies = [ - "ref-cast-impl", -] - -[[package]] -name = "ref-cast-impl" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fddb4f8d99b0a2ebafc65a87a69a7b9875e4b1ae1f00db265d300ef7f28bccc" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "regex" -version = "1.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" -dependencies = [ - "aho-corasick", - "memchr", - "regex-automata 0.4.6", - "regex-syntax 0.8.3", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax 0.6.29", -] - -[[package]] -name = "regex-automata" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax 0.8.3", -] - -[[package]] -name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - -[[package]] -name = "regex-syntax" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" - -[[package]] -name = "regex-syntax" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" - -[[package]] -name = "reqwest" -version = "0.11.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" -dependencies = [ - "base64 0.21.7", - "bytes", - "encoding_rs", - "futures-core", - "futures-util", - "h2", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.28", - "hyper-rustls 0.24.2", - "ipnet", - "js-sys", - "log", - "mime", - "once_cell", - "percent-encoding", - "pin-project-lite", - "rustls 0.21.11", - "rustls-native-certs 0.6.3", - "rustls-pemfile 1.0.4", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper", - "system-configuration", - "tokio", - "tokio-rustls 0.24.1", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "winreg 0.50.0", -] - -[[package]] -name = "reqwest" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10" -dependencies = [ - "async-compression", - "base64 0.22.0", - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "http 1.1.0", - "http-body 1.0.0", - "http-body-util", - "hyper 1.3.1", - "hyper-rustls 0.26.0", - "hyper-util", - "ipnet", - "js-sys", - "log", - "mime", - "once_cell", - "percent-encoding", - "pin-project-lite", - "rustls 0.22.4", - "rustls-native-certs 0.7.0", - "rustls-pemfile 2.1.2", - "rustls-pki-types", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper", - "tokio", - "tokio-rustls 0.25.0", - "tokio-util", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "wasm-streams", - "web-sys", - "winreg 0.52.0", -] - -[[package]] -name = "ring" -version = "0.17.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" -dependencies = [ - "cc", - "cfg-if", - "getrandom", - "libc", - "spin", - "untrusted", - "windows-sys 0.52.0", -] - -[[package]] -name = "rocket" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e7bb57ccb26670d73b6a47396c83139447b9e7878cab627fdfe9ea8da489150" -dependencies = [ - "async-stream", - "async-trait", - "atomic 0.5.3", - "binascii", - "bytes", - "either", - "figment", - "futures", - "indexmap 2.2.6", - "log", - "memchr", - "multer 2.1.0", - "num_cpus", - "parking_lot", - "pin-project-lite", - "rand", - "ref-cast", - "rocket_codegen", - "rocket_http", - "serde", - "serde_json", - "state", - "tempfile", - "time", - "tokio", - "tokio-stream", - "tokio-util", - "ubyte", - "version_check", - "yansi 1.0.1", -] - -[[package]] -name = "rocket_codegen" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2238066abf75f21be6cd7dc1a09d5414a671f4246e384e49fe3f8a4936bd04c" -dependencies = [ - "devise", - "glob", - "indexmap 2.2.6", - "proc-macro2", - "quote", - "rocket_http", - "syn 2.0.60", - "unicode-xid", - "version_check", -] - -[[package]] -name = "rocket_http" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37a1663694d059fe5f943ea5481363e48050acedd241d46deb2e27f71110389e" -dependencies = [ - "cookie", - "either", - "futures", - "http 0.2.12", - "hyper 0.14.28", - "indexmap 2.2.6", - "log", - "memchr", - "pear", - "percent-encoding", - "pin-project-lite", - "ref-cast", - "serde", - "smallvec", - "stable-pattern", - "state", - "time", - "tokio", - "uncased", -] - -[[package]] -name = "rustc-demangle" -version = "0.1.23" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" - -[[package]] -name = "rustix" -version = "0.38.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" -dependencies = [ - "bitflags 2.5.0", - "errno", - "libc", - "linux-raw-sys", - "windows-sys 0.52.0", -] - -[[package]] -name = "rustls" -version = "0.21.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fecbfb7b1444f477b345853b1fce097a2c6fb637b2bfb87e6bc5db0f043fae4" -dependencies = [ - "log", - "ring", - "rustls-webpki 0.101.7", - "sct", -] - -[[package]] -name = "rustls" -version = "0.22.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432" -dependencies = [ - "log", - "ring", - "rustls-pki-types", - "rustls-webpki 0.102.3", - "subtle", - "zeroize", -] - -[[package]] -name = "rustls-native-certs" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" -dependencies = [ - "openssl-probe", - "rustls-pemfile 1.0.4", - "schannel", - "security-framework", -] - -[[package]] -name = "rustls-native-certs" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" -dependencies = [ - "openssl-probe", - "rustls-pemfile 2.1.2", - "rustls-pki-types", - "schannel", - "security-framework", -] - -[[package]] -name = "rustls-pemfile" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" -dependencies = [ - "base64 0.21.7", -] - -[[package]] -name = "rustls-pemfile" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" -dependencies = [ - "base64 0.22.0", - "rustls-pki-types", -] - -[[package]] -name = "rustls-pki-types" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecd36cc4259e3e4514335c4a138c6b43171a8d61d8f5c9348f9fc7529416f247" - -[[package]] -name = "rustls-webpki" -version = "0.101.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" -dependencies = [ - "ring", - "untrusted", -] - -[[package]] -name = "rustls-webpki" -version = "0.102.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3bce581c0dd41bce533ce695a1437fa16a7ab5ac3ccfa99fe1a620a7885eabf" -dependencies = [ - "ring", - "rustls-pki-types", - "untrusted", -] - -[[package]] -name = "rustversion" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80af6f9131f277a45a3fba6ce8e2258037bb0477a67e610d3c1fe046ab31de47" - -[[package]] -name = "ryu" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "schannel" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" -dependencies = [ - "windows-sys 0.52.0", -] - -[[package]] -name = "scoped-tls" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "sct" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" -dependencies = [ - "ring", - "untrusted", -] - -[[package]] -name = "sealed" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4a8caec23b7800fb97971a1c6ae365b6239aaeddfb934d6265f8505e795699d" -dependencies = [ - "heck 0.4.1", - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "security-framework" -version = "2.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "770452e37cad93e0a50d5abc3990d2bc351c36d0328f86cefec2f2fb206eaef6" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41f3cc463c0ef97e11c3461a9d3787412d30e8e7eb907c79180c4a57bf7c04ef" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "semver" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" - -[[package]] -name = "serde" -version = "1.0.198" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9846a40c979031340571da2545a4e5b7c4163bdae79b301d5f86d03979451fcc" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.198" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e88edab869b01783ba905e7d0153f9fc1a6505a96e4ad3018011eedb838566d9" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "serde_json" -version = "1.0.116" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "serde_spanned" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - "form_urlencoded", - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "serde_with" -version = "3.7.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee80b0e361bbf88fd2f6e242ccd19cfda072cb0faa6ae694ecee08199938569a" -dependencies = [ - "base64 0.21.7", - "chrono", - "hex", - "indexmap 1.9.3", - "indexmap 2.2.6", - "serde", - "serde_derive", - "serde_json", - "serde_with_macros", - "time", -] - -[[package]] -name = "serde_with_macros" -version = "3.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6561dc161a9224638a31d876ccdfefbc1df91d3f3a8342eddb35f055d48c7655" -dependencies = [ - "darling", - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "sha1" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sha2" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sharded-slab" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" -dependencies = [ - "lazy_static", -] - -[[package]] -name = "signal-hook-registry" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" -dependencies = [ - "libc", -] - -[[package]] -name = "siphasher" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" - -[[package]] -name = "slab" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] - -[[package]] -name = "smallvec" -version = "1.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" - -[[package]] -name = "smart-default" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eb01866308440fc64d6c44d9e86c5cc17adfe33c4d6eed55da9145044d0ffc1" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "smawk" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" - -[[package]] -name = "socket2" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "spin" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" - -[[package]] -name = "stable-pattern" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4564168c00635f88eaed410d5efa8131afa8d8699a612c80c455a0ba05c21045" -dependencies = [ - "memchr", -] - -[[package]] -name = "state" -version = "0.6.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b8c4a4445d81357df8b1a650d0d0d6fbbbfe99d064aa5e02f3e4022061476d8" -dependencies = [ - "loom", -] - -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - -[[package]] -name = "strsim" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" - -[[package]] -name = "subtle" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" - -[[package]] -name = "sxd-document" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94d82f37be9faf1b10a82c4bd492b74f698e40082f0f40de38ab275f31d42078" -dependencies = [ - "peresil", - "typed-arena", -] - -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.60" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "909518bc7b1c9b779f1bbf07f2929d35af9f0f37e47c6e9ef7f9dddc1e1821f3" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "sync_wrapper" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" - -[[package]] -name = "synthez" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3d2c2202510a1e186e63e596d9318c91a8cbe85cd1a56a7be0c333e5f59ec8d" -dependencies = [ - "syn 2.0.60", - "synthez-codegen", - "synthez-core", -] - -[[package]] -name = "synthez-codegen" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f724aa6d44b7162f3158a57bccd871a77b39a4aef737e01bcdff41f4772c7746" -dependencies = [ - "syn 2.0.60", - "synthez-core", -] - -[[package]] -name = "synthez-core" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78bfa6ec52465e2425fd43ce5bbbe0f0b623964f7c63feb6b10980e816c654ea" -dependencies = [ - "proc-macro2", - "quote", - "sealed", - "syn 2.0.60", -] - -[[package]] -name = "sysinfo" -version = "0.30.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87341a165d73787554941cd5ef55ad728011566fe714e987d1b976c15dbc3a83" -dependencies = [ - "cfg-if", - "core-foundation-sys", - "libc", - "ntapi", - "once_cell", - "rayon", - "windows 0.52.0", -] - -[[package]] -name = "system-configuration" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "system-configuration-sys", -] - -[[package]] -name = "system-configuration-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" -dependencies = [ - "core-foundation-sys", - "libc", 
-] - -[[package]] -name = "tar" -version = "0.4.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb" -dependencies = [ - "filetime", - "libc", - "xattr", -] - -[[package]] -name = "tempfile" -version = "3.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" -dependencies = [ - "cfg-if", - "fastrand", - "rustix", - "windows-sys 0.52.0", -] - -[[package]] -name = "terminal_size" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" -dependencies = [ - "rustix", - "windows-sys 0.48.0", -] - -[[package]] -name = "textwrap" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" -dependencies = [ - "smawk", - "unicode-linebreak", - "unicode-width", -] - -[[package]] -name = "thiserror" -version = "1.0.59" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0126ad08bff79f29fc3ae6a55cc72352056dfff61e3ff8bb7129476d44b23aa" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.59" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cd413b5d558b4c5bf3680e324a6fa5014e7b7c067a51e69dbdf47eb7148b66" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "thread_local" -version = "1.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" -dependencies = [ - "cfg-if", - "once_cell", -] - -[[package]] -name = "time" -version = "0.3.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" -dependencies = [ - "deranged", - "itoa", - "num-conv", - "powerfmt", - "serde", - "time-core", - "time-macros", -] - -[[package]] -name = "time-core" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" - -[[package]] -name = "time-macros" -version = "0.2.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" -dependencies = [ - "num-conv", - "time-core", -] - -[[package]] -name = "tinyvec" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "tokio" -version = "1.37.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" -dependencies = [ - "backtrace", - "bytes", - "libc", - "mio", - "num_cpus", - "parking_lot", - "pin-project-lite", - "signal-hook-registry", - "socket2", - "tokio-macros", - "windows-sys 0.48.0", -] - -[[package]] -name 
= "tokio-io-timeout" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" -dependencies = [ - "pin-project-lite", - "tokio", -] - -[[package]] -name = "tokio-macros" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "tokio-rustls" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" -dependencies = [ - "rustls 0.21.11", - "tokio", -] - -[[package]] -name = "tokio-rustls" -version = "0.25.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" -dependencies = [ - "rustls 0.22.4", - "rustls-pki-types", - "tokio", -] - -[[package]] -name = "tokio-stream" -version = "0.1.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" -dependencies = [ - "futures-core", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "tokio-util" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" -dependencies = [ - "bytes", - "futures-core", - "futures-sink", - "pin-project-lite", - "tokio", - "tracing", -] - -[[package]] -name = "toml" -version = "0.8.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9dd1545e8208b4a5af1aa9bbd0b4cf7e9ea08fabc5d0a5c67fcaafa17433aa3" -dependencies = [ - "serde", - "serde_spanned", - "toml_datetime", - "toml_edit", -] - -[[package]] -name = "toml_datetime" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" -dependencies = [ - "serde", -] - -[[package]] -name = "toml_edit" -version = "0.22.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3328d4f68a705b2a4498da1d580585d39a6510f98318a2cec3018a7ec61ddef" -dependencies = [ - "indexmap 2.2.6", - "serde", - "serde_spanned", - "toml_datetime", - "winnow", -] - -[[package]] -name = "tonic" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76c4eb7a4e9ef9d4763600161f12f5070b92a578e1b634db88a6887844c91a13" -dependencies = [ - "async-stream", - "async-trait", - "axum", - "base64 0.21.7", - "bytes", - "h2", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.28", - "hyper-timeout", - "percent-encoding", - "pin-project", - "prost", - "tokio", - "tokio-stream", - "tower", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tonic-build" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4ef6dd70a610078cb4e338a0f79d06bc759ff1b22d2120c2ff02ae264ba9c2" -dependencies = [ - "prettyplease", - "proc-macro2", - "prost-build", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "tower" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" 
-dependencies = [ - "futures-core", - "futures-util", - "indexmap 1.9.3", - "pin-project", - "pin-project-lite", - "rand", - "slab", - "tokio", - "tokio-util", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower-layer" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" - -[[package]] -name = "tower-service" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" - -[[package]] -name = "tracing" -version = "0.1.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" -dependencies = [ - "log", - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "tracing-core" -version = "0.1.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" -dependencies = [ - "once_cell", - "valuable", -] - -[[package]] -name = "tracing-log" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" -dependencies = [ - "matchers", - "nu-ansi-term", - "once_cell", - "regex", - "sharded-slab", - "smallvec", - "thread_local", - "tracing", - "tracing-core", - "tracing-log", -] - -[[package]] -name = "tree_magic_mini" -version = "3.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ee137597cdb361b55a4746983e4ac1b35ab6024396a419944ad473bb915265" -dependencies = [ - "fnv", - "home", - "memchr", - "nom", - "once_cell", - "petgraph", -] - -[[package]] -name = "try-lock" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" - -[[package]] -name = "typed-arena" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9b2228007eba4120145f785df0f6c92ea538f5a3635a612ecf4e334c8c1446d" - -[[package]] -name = "typed-builder" -version = "0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe83c85a85875e8c4cb9ce4a890f05b23d38cd0d47647db7895d3d2a79566d2" -dependencies = [ - "typed-builder-macro", -] - -[[package]] -name = "typed-builder-macro" -version = "0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29a3151c41d0b13e3d011f98adc24434560ef06673a155a6c7f66b9879eecce2" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.60", -] - -[[package]] -name = "typenum" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" - -[[package]] -name = "ubyte" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f720def6ce1ee2fc44d40ac9ed6d3a59c361c80a75a7aa8e75bb9baed31cf2ea" -dependencies = [ - "serde", -] - -[[package]] -name = "uncased" -version = "0.9.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1b88fcfe09e89d3866a5c11019378088af2d24c3fbd4f0543f96b479ec90697" -dependencies = [ - "serde", - "version_check", -] - -[[package]] -name = "unicode-bidi" -version = "0.3.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" - -[[package]] -name = "unicode-ident" -version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" - -[[package]] -name = "unicode-linebreak" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" - -[[package]] -name = "unicode-normalization" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "unicode-width" -version = "0.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" - -[[package]] -name = "unicode-xid" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" - -[[package]] -name = "untrusted" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" - -[[package]] -name = "url" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" -dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", -] - -[[package]] -name = "urlencoding" -version = "2.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" - -[[package]] -name = "utf8parse" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" - -[[package]] -name = "uuid" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0" -dependencies = [ - "getrandom", -] - -[[package]] -name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "walkdir" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" -dependencies = [ - "same-file", - "winapi-util", -] - -[[package]] -name = "want" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" -dependencies = [ - "try-lock", -] - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "wasm-bindgen" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" -dependencies = [ - "cfg-if", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" -dependencies = [ - "bumpalo", - "log", - "once_cell", - "proc-macro2", - "quote", - "syn 2.0.60", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" -dependencies = [ - "cfg-if", - "js-sys", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.60", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" - -[[package]] -name = "wasm-streams" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" -dependencies = [ - "futures-util", - "js-sys", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - -[[package]] -name = "web-sys" -version = "0.3.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.6" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" -dependencies = [ - "winapi", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" -dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" -dependencies = [ - "windows-core", - "windows-targets 0.52.5", -] - -[[package]] -name = "windows-core" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" -dependencies = [ - "windows-targets 0.52.5", -] - -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets 0.52.5", -] - -[[package]] -name = "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", -] - -[[package]] -name = "windows-targets" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" -dependencies = [ - "windows_aarch64_gnullvm 0.52.5", - "windows_aarch64_msvc 0.52.5", - "windows_i686_gnu 0.52.5", - "windows_i686_gnullvm", - "windows_i686_msvc 0.52.5", - "windows_x86_64_gnu 0.52.5", - "windows_x86_64_gnullvm 0.52.5", - "windows_x86_64_msvc 0.52.5", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" - -[[package]] 
-name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" - -[[package]] -name = "winnow" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0c976aaaa0e1f90dbb21e9587cdaf1d9679a1cde8875c0d6bd83ab96a208352" -dependencies = [ - "memchr", -] - -[[package]] -name = "winreg" -version = "0.50.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" -dependencies = [ - "cfg-if", - "windows-sys 0.48.0", -] - -[[package]] -name = "winreg" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" -dependencies = [ - "cfg-if", - "windows-sys 0.48.0", -] - -[[package]] -name = "xattr" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" -dependencies = [ - "libc", - "linux-raw-sys", - "rustix", -] - -[[package]] -name = "yansi" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" - -[[package]] -name = "yansi" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" -dependencies = [ - "is-terminal", -] - -[[package]] -name = "zeroize" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" - -[[package]] -name = "zip" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" -dependencies = [ - "aes", - "byteorder", - "bzip2", - "constant_time_eq", - "crc32fast", - "crossbeam-utils", - "flate2", - "hmac", - "pbkdf2", - "sha1", - "time", - "zstd", -] - -[[package]] -name = "zstd" -version = "0.11.2+zstd.1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" -dependencies = [ - "zstd-safe", -] - -[[package]] -name = "zstd-safe" -version = "5.0.2+zstd.1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2a5585e04f9eea4b2a3d1eca508c4dee9592a89ef6f450c11719da0726f4db" -dependencies = [ - "libc", - "zstd-sys", -] - -[[package]] -name = "zstd-sys" -version = "2.0.10+zstd.1.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c253a4914af5bafc8fa8c86ee400827e83cf6ec01195ec1f1ed8441bf00d65aa" -dependencies = [ - "cc", - "pkg-config", -] diff --git a/compatibility-suite/Cargo.toml b/compatibility-suite/Cargo.toml deleted file mode 100644 index 5a04f8c2f..000000000 --- a/compatibility-suite/Cargo.toml +++ /dev/null @@ -1,63 +0,0 @@ -[package] -name = "compatibility-suite" -version = "0.0.0" -edition = "2021" - -[dev-dependencies] -anyhow = "1.0.75" -async-trait = "0.1.74" -base64 = "0.22.0" -bytes = "1.5.0" -cucumber = "0.20.1" -futures = "0.3.29" -itertools = "0.12.1" -lazy_static = "1.4.0" -maplit = "1.0.2" -pact_models = { version = "~1.2.0" } -pact_matching = { version = "1.1.9", path = "../rust/pact_matching" } -pact_mock_server = { version = "1.2.6", path = "../rust/pact_mock_server" } -pact_verifier = { version = "1.1.0", path = "../rust/pact_verifier" } -pact_consumer = { version = "1.2.0", path = "../rust/pact_consumer" } -pretty_assertions = "1.4.0" -regex = "1.10.2" -reqwest = { version = "0.12.3", default-features = false, features = ["rustls-tls-native-roots", "json"] } -rocket = { version = "0.5.0", features = ["json"] } -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0.108" -sxd-document = "0.3.2" -tokio = { version = "1.33.0", features = ["full"] } -tracing = "0.1.40" -tracing-subscriber = { version = "0.3.17", features = ["env-filter", "tracing-log", "fmt"] } -uuid = { version = "1.5.0", features = ["v4"] } - -[[test]] -name = "v1_consumer" -harness = false - -[[test]] -name = "v1_provider" -harness = false - -[[test]] -name = "v2_consumer" -harness = false - -[[test]] -name = "v2_provider" -harness = false - -[[test]] -name = "v3" -harness = false - -[[test]] -name = "v3_provider" -harness = false - -[[test]] -name = "v3_message" -harness = false - -[[test]] -name = "v4" -harness = false diff --git a/compatibility-suite/build.rs b/compatibility-suite/build.rs deleted file mode 100644 index b35767652..000000000 --- a/compatibility-suite/build.rs +++ 
/dev/null @@ -1,7 +0,0 @@ -use std::env; - -fn main() { - if env::var("CARGO_CFG_TARGET_ENV").as_deref() == Ok("msvc") { - println!("cargo:rustc-link-arg=/stack:{}", 8 * 1024 * 1024); - } -} diff --git a/compatibility-suite/pact-compatibility-suite/.github/workflows/triage.yml b/compatibility-suite/pact-compatibility-suite/.github/workflows/triage.yml deleted file mode 100644 index eb5ec3054..000000000 --- a/compatibility-suite/pact-compatibility-suite/.github/workflows/triage.yml +++ /dev/null @@ -1,15 +0,0 @@ -name: Triage Issue - -on: - issues: - types: - - opened - - labeled - pull_request: - types: - - labeled - -jobs: - call-workflow: - uses: pact-foundation/.github/.github/workflows/triage.yml@master - secrets: inherit diff --git a/compatibility-suite/pact-compatibility-suite/.gitignore b/compatibility-suite/pact-compatibility-suite/.gitignore deleted file mode 100644 index e3c7e3bb5..000000000 --- a/compatibility-suite/pact-compatibility-suite/.gitignore +++ /dev/null @@ -1,24 +0,0 @@ -.gradle -**/build/ -!src/**/build/ - -# Ignore Gradle GUI config -gradle-app.setting - -# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) -!gradle-wrapper.jar - -# Avoid ignore Gradle wrappper properties -!gradle-wrapper.properties - -# Cache of project -.gradletasknamecache - -# Eclipse Gradle plugin generated files -# Eclipse Core -.project -# JDT-specific (Eclipse Java Development Tools) -.classpath - -# Idea project files -.idea/ diff --git a/compatibility-suite/pact-compatibility-suite/LICENSE b/compatibility-suite/pact-compatibility-suite/LICENSE deleted file mode 100644 index 261eeb9e9..000000000 --- a/compatibility-suite/pact-compatibility-suite/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/compatibility-suite/pact-compatibility-suite/README.md b/compatibility-suite/pact-compatibility-suite/README.md deleted file mode 100644 index 955ac495d..000000000 --- a/compatibility-suite/pact-compatibility-suite/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# pact-compatibility-suite -Set of BDD style tests to check compatibility between Pact implementations. - -This repository contains the BDD features for verifying a Pact implementation. It requires the [Cucumber BDD](https://cucumber.io/) test tool to execute. - -## Adding it to a project -The easiest way to add the suite to a project to create a compatibility-suite subdirectory and then use the Git subtree command to pull the features and fixtures. -The project then needs the steps to be implemented to get the features to pass. 
- -Recommended project layout: - -``` -compatibility-suite - pact-compatibility-suite (subtree from this repo) - steps (code for the steps, can be named anything) -``` - -For examples of how this has been implemented, see https://github.com/pact-foundation/pact-reference/tree/master/compatibility-suite and https://github.com/pact-foundation/pact-jvm/tree/master/compatibility-suite. - -## Fixtures - -The project has a number of fixture files that the features refer to. These files have the following formats. - -### Body contents (XML) -Any file ending in `-body.xml` contains data to set up the contents of a request, response or message. It can contain the following elements. - -#### body -This is the root element. - -#### body/contentType -This sets the content type of the body. It must be a valid MIME type. If not provided, it will default to either `text/plain` or `application/octet-stream`. - -#### body/contents -The contents of the body. If newlines are required to be preserved, wrap the contents in a CDATA block. If the contents require the line endings to be CRLF -(for instance, MIME multipart formats require CRLF line endings), set the attribute `eol="CRLF"`. - -### Matcher fragments -Any JSON file with a pattern `[matcher]-matcher-[type]-[format].json` or `[matcher]-matcher-[format].json` (e.g. `regex-matcher-header-v2.json`) contains matching rules -in the format persisted in Pact files. They can be loaded and added to any request, response or message. - -### All other files -All other files will be used as data for the contents of requests, responses or messages. The content type will be derived from the file extension. diff --git a/compatibility-suite/pact-compatibility-suite/features/V1/http_consumer.feature b/compatibility-suite/pact-compatibility-suite/features/V1/http_consumer.feature deleted file mode 100644 index 2425db197..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V1/http_consumer.feature +++ /dev/null @@ -1,228 +0,0 @@ -@consumer -Feature: Basic HTTP consumer - Supports basic HTTP consumer interactions - - Background: - Given the following HTTP interactions have been defined: - | No | method | path | query | headers | body | response | response content | response body | - | 1 | GET | /basic | | | | 200 | application/json | file: basic.json | - | 2 | GET | /with_params | a=1&b=2 | | | 200 | | | - | 3 | GET | /with_headers | | 'X-TEST: Compatibility' | | 200 | | | - | 4 | PUT | /basic | | | file: basic.json | 200 | | | - | 5 | PUT | /plain | | | file: text-body.xml | 200 | | | - | 6 | PUT | /xml | | | file: xml-body.xml | 200 | | | - | 7 | PUT | /bin | | | file: rat.jpg | 200 | | | - | 8 | PUT | /form | | | file: form-post-body.xml | 200 | | | - | 9 | PUT | /multipart | | | file: multipart-body.xml | 200 | | | - - Scenario: When all requests are made to the mock server - When the mock server is started with interaction 1 - And request 1 is made to the mock server - Then a 200 success response is returned - And the payload will contain the "basic" JSON document - And the content type will be set as "application/json" - When the pact test is done - Then the mock server status will be OK - And the mock server will write out a Pact file for the interaction when done - And the pact file will contain {1} interaction - And the {first} interaction request will be for a "GET" - And the {first} interaction response will contain the "basic.json" document - - Scenario: When not all requests are made to the mock server - When the mock server is
started with interactions "1, 2" - And request 1 is made to the mock server - Then a 200 success response is returned - When the pact test is done - Then the mock server status will NOT be OK - And the mock server will NOT write out a Pact file for the interactions when done - And the mock server status will be an expected but not received error for interaction {2} - - Scenario: When an unexpected request is made to the mock server - When the mock server is started with interaction 1 - And request 2 is made to the mock server - Then a 500 error response is returned - When the pact test is done - Then the mock server status will NOT be OK - And the mock server will NOT write out a Pact file for the interactions when done - And the mock server status will be an unexpected "GET" request received error for interaction {2} - - Scenario: Request with query parameters - When the mock server is started with interaction 2 - And request 2 is made to the mock server - Then a 200 success response is returned - When the pact test is done - Then the mock server status will be OK - And the mock server will write out a Pact file for the interaction when done - And the pact file will contain {1} interaction - And the {first} interaction request query parameters will be "a=1&b=2" - - Scenario: Request with invalid query parameters - When the mock server is started with interaction 2 - And request 2 is made to the mock server with the following changes: - | query | - | a=1&c=3 | - Then a 500 error response is returned - When the pact test is done - Then the mock server status will NOT be OK - And the mock server status will be mismatches - And the mismatches will contain a "query" mismatch with error "Expected query parameter 'b' but was missing" - And the mismatches will contain a "query" mismatch with error "Unexpected query parameter 'c' received" - And the mock server will NOT write out a Pact file for the interaction when done - - Scenario: Request with invalid path - When the mock server is started with interaction 1 - And request 1 is made to the mock server with the following changes: - | path | - | /path | - Then a 500 error response is returned - When the pact test is done - Then the mock server status will NOT be OK - And the mock server will NOT write out a Pact file for the interaction when done - And the mock server status will be an unexpected "GET" request received error for path "/path" - - Scenario: Request with invalid method - When the mock server is started with interaction 1 - And request 1 is made to the mock server with the following changes: - | method | - | HEAD | - Then a 500 error response is returned - When the pact test is done - Then the mock server status will NOT be OK - And the mock server will NOT write out a Pact file for the interaction when done - And the mock server status will be an unexpected "HEAD" request received error for path "/basic" - - Scenario: Request with headers - When the mock server is started with interaction 3 - And request 3 is made to the mock server - Then a 200 success response is returned - When the pact test is done - Then the mock server status will be OK - And the mock server will write out a Pact file for the interaction when done - And the pact file will contain {1} interaction - And the {first} interaction request will contain the header "X-TEST" with value "Compatibility" - - Scenario: Request with invalid headers - When the mock server is started with interaction 3 - And request 3 is made to the mock server with the following changes: - | 
headers | - | 'X-OTHER: Something' | - Then a 500 error response is returned - When the pact test is done - Then the mock server status will NOT be OK - And the mock server status will be mismatches - And the mismatches will contain a "header" mismatch with error "Expected a header 'X-TEST' but was missing" - And the mock server will NOT write out a Pact file for the interaction when done - - Scenario: Request with body - When the mock server is started with interaction 4 - And request 4 is made to the mock server - Then a 200 success response is returned - When the pact test is done - Then the mock server status will be OK - And the mock server will write out a Pact file for the interaction when done - And the pact file will contain {1} interaction - And the {first} interaction request will be for a "PUT" - And the {first} interaction request content type will be "application/json" - And the {first} interaction request will contain the "basic.json" document - - Scenario: Request with invalid body - When the mock server is started with interaction 4 - And request 4 is made to the mock server with the following changes: - | body | - | JSON: {"one": "a", "two": "c"} | - Then a 500 error response is returned - When the pact test is done - Then the mock server status will NOT be OK - And the mock server status will be mismatches - And the mismatches will contain a "body" mismatch with path "$.two" with error "Expected 'c' (String) to be equal to 'b' (String)" - And the mock server will NOT write out a Pact file for the interaction when done - - Scenario: Request with the incorrect type of body contents - When the mock server is started with interaction 4 - And request 4 is made to the mock server with the following changes: - | body | - | XML: | - Then a 500 error response is returned - When the pact test is done - Then the mock server status will NOT be OK - And the mock server status will be mismatches - And the mismatches will contain a "body-content-type" mismatch with error "Expected a body of 'application/json' but the actual content type was 'application/xml'" - And the mock server will NOT write out a Pact file for the interaction when done - - Scenario: Request with plain text body (positive case) - When the mock server is started with interaction 5 - And request 5 is made to the mock server - Then a 200 success response is returned - - Scenario: Request with plain text body (negative case) - When the mock server is started with interaction 5 - And request 5 is made to the mock server with the following changes: - | body | - | Hello Mars! | - Then a 500 error response is returned - And the mismatches will contain a "body" mismatch with error "Expected body 'Hello World!' to match 'Hello Mars!' 
using equality but did not match" - - Scenario: Request with JSON body (positive case) - When the mock server is started with interaction 4 - And request 4 is made to the mock server - Then a 200 success response is returned - - Scenario: Request with JSON body (negative case) - When the mock server is started with interaction 4 - And request 4 is made to the mock server with the following changes: - | body | - | JSON: {"one": "a"} | - Then a 500 error response is returned - And the mismatches will contain a "body" mismatch with error "Expected a Map with keys [one, two] but received one with keys [one]" - - Scenario: Request with XML body (positive case) - When the mock server is started with interaction 6 - And request 6 is made to the mock server - Then a 200 success response is returned - - Scenario: Request with XML body (negative case) - When the mock server is started with interaction 6 - And request 6 is made to the mock server with the following changes: - | body | - | XML: A | - Then a 500 error response is returned - And the mismatches will contain a "body" mismatch with error "Expected child but was missing" - - Scenario: Request with a binary body (positive case) - When the mock server is started with interaction 7 - And request 7 is made to the mock server - Then a 200 success response is returned - - Scenario: Request with a binary body (negative case) - When the mock server is started with interaction 7 - And request 7 is made to the mock server with the following changes: - | body | - | file: spider.jpg | - Then a 500 error response is returned - And the mismatches will contain a "body" mismatch with error "Actual body [image/jpeg, 30922 bytes, starting with ffd8ffe000104a46494600010101004800480000ffe100ae4578696600004949] is not equal to the expected body [image/jpeg, 28058 bytes, starting with ffd8ffe000104a46494600010101012c012c0000ffe12db64578696600004949]" - - Scenario: Request with a form post body (positive case) - When the mock server is started with interaction 8 - And request 8 is made to the mock server - Then a 200 success response is returned - - Scenario: Request with a form post body (negative case) - When the mock server is started with interaction 8 - And request 8 is made to the mock server with the following changes: - | body | - | a=1&b=2&c=33&d=4 | - Then a 500 error response is returned - And the mismatches will contain a "body" mismatch with error "Expected form post parameter 'c' with value '3' but was '33'" - - Scenario: Request with a multipart body (positive case) - When the mock server is started with interaction 9 - And request 9 is made to the mock server - Then a 200 success response is returned - - Scenario: Request with a multipart body (negative case) - When the mock server is started with interaction 9 - And request 9 is made to the mock server with the following changes: - | body | - | file: multipart2-body.xml | - Then a 500 error response is returned - And the mismatches will contain a "body" mismatch with error "Actual body [application/octet-stream, 50 bytes, starting with 7b0a2020202022626f6479223a2022546869732069732074686520626f647920] is not equal to the expected body [application/octet-stream, 97 bytes, starting with 3c68746d6c3e0a20203c686561643e0a20203c2f686561643e0a20203c626f64]" diff --git a/compatibility-suite/pact-compatibility-suite/features/V1/http_provider.feature b/compatibility-suite/pact-compatibility-suite/features/V1/http_provider.feature deleted file mode 100644 index 94fda4477..000000000 --- 
a/compatibility-suite/pact-compatibility-suite/features/V1/http_provider.feature +++ /dev/null @@ -1,225 +0,0 @@ -@provider -Feature: Basic HTTP provider - Supports verifying a basic HTTP provider - - Background: - Given the following HTTP interactions have been defined: - | No | method | path | query | headers | body | response | response headers | response content | response body | - | 1 | GET | /basic | | | | 200 | | application/json | file: basic.json | - | 2 | GET | /with_params | a=1&b=2 | | | 200 | | | | - | 3 | GET | /with_headers | | 'X-TEST: Compatibility' | | 200 | | | | - | 4 | PUT | /basic | | | file: basic.json | 200 | | | | - | 5 | GET | /basic | | | | 200 | 'X-TEST: Something' | application/json | file: basic.json | - | 6 | GET | /plain | | | | 200 | | | file: text-body.xml | - | 7 | GET | /xml | | | | 200 | | | file: xml-body.xml | - | 8 | GET | /bin | | | | 200 | | | file: rat.jpg | - | 9 | GET | /form | | | | 200 | | | file: form-post-body.xml | - | 10 | GET | /multi | | | | 200 | | | file: multipart-body.xml | - - Scenario: Verifying a simple HTTP request - Given a provider is started that returns the response from interaction 1 - And a Pact file for interaction 1 is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Verifying multiple Pact files - Given a provider is started that returns the responses from interactions "1, 2" - And a Pact file for interaction 1 is to be verified - And a Pact file for interaction 2 is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Incorrect request is made to provider - Given a provider is started that returns the response from interaction 1 - And a Pact file for interaction 2 is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Response status did not match" error - - Scenario: Verifying a simple HTTP request via a Pact broker - Given a provider is started that returns the response from interaction 1 - And a Pact file for interaction 1 is to be verified from a Pact broker - When the verification is run - Then the verification will be successful - And a verification result will NOT be published back - - Scenario: Verifying a simple HTTP request via a Pact broker with publishing results enabled - Given a provider is started that returns the response from interaction 1 - And a Pact file for interaction 1 is to be verified from a Pact broker - And publishing of verification results is enabled - When the verification is run - Then the verification will be successful - And a successful verification result will be published back for interaction {1} - - Scenario: Verifying multiple Pact files via a Pact broker - Given a provider is started that returns the responses from interactions "1, 2" - And a Pact file for interaction 1 is to be verified from a Pact broker - And a Pact file for interaction 2 is to be verified from a Pact broker - And publishing of verification results is enabled - When the verification is run - Then the verification will be successful - And a successful verification result will be published back for interaction {1} - And a successful verification result will be published back for interaction {2} - - Scenario: Incorrect request is made to provider via a Pact broker - Given a provider is started that returns the response from interaction 1 - And a Pact file for interaction 2 is to be verified from a Pact broker - And publishing 
of verification results is enabled - When the verification is run - Then the verification will NOT be successful - And a failed verification result will be published back for the interaction {2} - - Scenario: Verifying an interaction with a defined provider state - Given a provider is started that returns the response from interaction 1 - And a provider state callback is configured - And a Pact file for interaction 1 is to be verified with a provider state "state one" defined - When the verification is run - Then the provider state callback will be called before the verification is run - And the provider state callback will receive a setup call with "state one" as the provider state parameter - And the provider state callback will be called after the verification is run - And the provider state callback will receive a teardown call "state one" as the provider state parameter - - Scenario: Verifying an interaction with no defined provider state - Given a provider is started that returns the response from interaction 1 - And a provider state callback is configured - And a Pact file for interaction 1 is to be verified - When the verification is run - Then the provider state callback will be called before the verification is run - And the provider state callback will receive a setup call with "" as the provider state parameter - And the provider state callback will be called after the verification is run - And the provider state callback will receive a teardown call "" as the provider state parameter - - Scenario: Verifying an interaction where the provider state callback fails - Given a provider is started that returns the response from interaction 1 - And a provider state callback is configured, but will return a failure - And a Pact file for interaction 1 is to be verified with a provider state "state one" defined - When the verification is run - Then the provider state callback will be called before the verification is run - And the verification will NOT be successful - And the verification results will contain a "State change request failed" error - And the provider state callback will NOT receive a teardown call - - Scenario: Verifying an interaction where a provider state callback is not configured - Given a provider is started that returns the response from interaction 1 - And a Pact file for interaction 1 is to be verified with a provider state "state one" defined - When the verification is run - Then the verification will be successful - And a warning will be displayed that there was no provider state callback configured for provider state "state one" - - Scenario: Verifying a HTTP request with a request filter configured - Given a provider is started that returns the response from interaction 1 - And a Pact file for interaction 1 is to be verified - And a request filter is configured to make the following changes: - | headers | - | 'A: 1' | - When the verification is run - Then the verification will be successful - And the request to the provider will contain the header "A: 1" - - Scenario: Verifies the response status code - Given a provider is started that returns the response from interaction 1, with the following changes: - | status | - | 400 | - And a Pact file for interaction 1 is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Response status did not match" error - - Scenario: Verifies the response headers - Given a provider is started that returns the response from interaction 1, with 
the following changes: - | headers | - | 'X-TEST: Compatibility' | - And a Pact file for interaction 5 is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Headers had differences" error - - Scenario: Response with plain text body (positive case) - Given a provider is started that returns the response from interaction 6 - And a Pact file for interaction 6 is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Response with plain text body (negative case) - Given a provider is started that returns the response from interaction 6, with the following changes: - | body | - | Hello Compatibility Suite! | - And a Pact file for interaction 6 is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Body had differences" error - - Scenario: Response with JSON body (positive case) - Given a provider is started that returns the response from interaction 1 - And a Pact file for interaction 1 is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Response with JSON body (negative case) - Given a provider is started that returns the response from interaction 1, with the following changes: - | body | - | JSON: { "one": 100, "two": "b" } | - And a Pact file for interaction 1 is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Body had differences" error - - Scenario: Response with XML body (positive case) - Given a provider is started that returns the response from interaction 7 - And a Pact file for interaction 7 is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Response with XML body (negative case) - Given a provider is started that returns the response from interaction 7, with the following changes: - | body | - | XML: A | - And a Pact file for interaction 7 is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Body had differences" error - - Scenario: Response with binary body (positive case) - Given a provider is started that returns the response from interaction 8 - And a Pact file for interaction 8 is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Response with binary body (negative case) - Given a provider is started that returns the response from interaction 8, with the following changes: - | body | - | file: spider.jpg | - And a Pact file for interaction 8 is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Body had differences" error - - Scenario: Response with form post body (positive case) - Given a provider is started that returns the response from interaction 9 - And a Pact file for interaction 9 is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Response with form post body (negative case) - Given a provider is started that returns the response from interaction 9, with the following changes: - | body | - | a=1&b=2&c=33&d=4 | - And a Pact file for interaction 9 is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results 
will contain a "Body had differences" error - - Scenario: Response with multipart body (positive case) - Given a provider is started that returns the response from interaction 10 - And a Pact file for interaction 10 is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Response with multipart body (negative case) - Given a provider is started that returns the response from interaction 10, with the following changes: - | body | - | file: multipart2-body.xml | - And a Pact file for interaction 10 is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Body had differences" error diff --git a/compatibility-suite/pact-compatibility-suite/features/V2/http_consumer.feature b/compatibility-suite/pact-compatibility-suite/features/V2/http_consumer.feature deleted file mode 100644 index 14330c33e..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V2/http_consumer.feature +++ /dev/null @@ -1,117 +0,0 @@ -@consumer -Feature: Basic HTTP consumer - Supports basic HTTP consumer interactions - - Background: - Given the following HTTP interactions have been defined: - | No | method | path | query | headers | body | matching rules | - | 1 | POST | /path | | | file: basic.json | regex-matcher-v2.json | - | 2 | POST | /path | | | file: basic.json | type-matcher-v2.json | - | 3 | GET | /aaa/100/ | | | | regex-matcher-path-v2.json | - | 4 | GET | /path | a=1&b=2&c=abc&d=true | | | regex-matcher-query-v2.json | - | 5 | GET | /path | | 'X-Test: 1000' | | regex-matcher-header-v2.json | - - Scenario: Supports a regex matcher (negative case) - When the mock server is started with interaction 1 - And request 1 is made to the mock server - Then a 500 error response is returned - And the mismatches will contain a "body" mismatch with error "Expected 'a' to match '\w{3}\d{3}'" - - Scenario: Supports a regex matcher (positive case) - When the mock server is started with interaction 1 - And request 1 is made to the mock server with the following changes: - | body | - | JSON: { "one": "HHH123", "two": "b" } | - Then a 200 success response is returned - - Scenario: Supports a type matcher (negative case) - When the mock server is started with interaction 2 - And request 2 is made to the mock server with the following changes: - | body | - | JSON: { "one": 100, "two": "b" } | - Then a 500 error response is returned - And the mismatches will contain a "body" mismatch with error "Expected 100 (Integer) to be the same type as 'a' (String)" - - Scenario: Type matchers cascade to children (positive case) - When the mock server is started with interaction 2 but with the following changes: - | body | - | file: 3-level.json | - And request 2 is made to the mock server with the following changes: - | body | - | JSON: { "one": { "a": { "ids": [100], "status": "Lovely" } }, "two": [ { "ids": [1], "status": "BAD" } ] } | - Then a 200 success response is returned - - Scenario: Type matchers cascade to children (negative case) - When the mock server is started with interaction 2 but with the following changes: - | body | - | file: 3-level.json | - And request 2 is made to the mock server with the following changes: - | body | - | JSON: { "one": { "a": { "ids": ["100"], "status": "Lovely" } }, "two": [ { "ids": [1], "status": "BAD" } ] } | - Then a 500 error response is returned - And the mismatches will contain a "body" mismatch with error "Expected '100' (String) to be the same 
type as 1 (Integer)" - - Scenario: Supports a type matcher (positive case) - When the mock server is started with interaction 2 - And request 2 is made to the mock server with the following changes: - | body | - | JSON: { "one": "HHH123", "two": "b" } | - Then a 200 success response is returned - - Scenario: Supports a matcher for request paths - When the mock server is started with interaction 3 - And request 3 is made to the mock server with the following changes: - | path | - | /XYZ/123 | - Then a 200 success response is returned - - Scenario: Supports matchers for request query parameters - When the mock server is started with interaction 4 - And request 4 is made to the mock server with the following changes: - | query | - | b=2&c=abc&d=true&a=999 | - Then a 200 success response is returned - - Scenario: Supports matchers for repeated request query parameters (positive case) - When the mock server is started with interaction 4 - And request 4 is made to the mock server with the following changes: - | query | - | a=123&b=2&c=abc&d=true&a=9999 | - Then a 200 success response is returned - - Scenario: Supports matchers for repeated request query parameters (negative case) - When the mock server is started with interaction 4 - And request 4 is made to the mock server with the following changes: - | query | - | a=123&b=2&c=abc&d=true&a=9999X | - Then a 500 error response is returned - And the mismatches will contain a "query" mismatch with error "Expected '9999X' to match '\d{1,4}'" - - Scenario: Supports matchers for request headers - When the mock server is started with interaction 5 - And request 5 is made to the mock server with the following changes: - | headers | - | 'X-Test: 1000' | - Then a 200 success response is returned - - Scenario: Supports matchers for repeated request headers (positive case) - When the mock server is started with interaction 5 - And request 5 is made to the mock server with the following changes: - | raw headers | - | 'X-Test: 1000', 'X-Test: 1234', 'X-Test: 9999' | - Then a 200 success response is returned - - Scenario: Supports matchers for repeated request headers (negative case) - When the mock server is started with interaction 5 - And request 5 is made to the mock server with the following changes: - | raw headers | - | 'X-Test: 1000', 'X-Test: 1234', 'X-Test: 9999ABC' | - Then a 500 error response is returned - And the mismatches will contain a "header" mismatch with error "Expected '9999ABC' to match '\d{1,4}'" - - Scenario: Supports matchers for request bodies - When the mock server is started with interaction 2 - And request 2 is made to the mock server with the following changes: - | body | - | JSON: { "one": "c", "two": "b" } | - Then a 200 success response is returned diff --git a/compatibility-suite/pact-compatibility-suite/features/V2/http_provider.feature b/compatibility-suite/pact-compatibility-suite/features/V2/http_provider.feature deleted file mode 100644 index d51df8b4d..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V2/http_provider.feature +++ /dev/null @@ -1,43 +0,0 @@ -@provider -Feature: Basic HTTP provider - Supports verifying a basic HTTP provider - - Background: - Given the following HTTP interactions have been defined: - | No | method | path | response | response headers | response content | response body | response matching rules | - | 1 | GET | /one | 200 | 'X-TEST: 1' | application/json | file: basic.json | regex-matcher-header-v2.json | - | 2 | GET | /two | 200 | | application/json | file: basic.json | 
type-matcher-v2.json | - - Scenario: Supports matching rules for the response headers (positive case) - Given a provider is started that returns the response from interaction 1, with the following changes: - | headers | - | 'X-TEST: 1000' | - And a Pact file for interaction 1 is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Supports matching rules for the response headers (negative case) - Given a provider is started that returns the response from interaction 1, with the following changes: - | headers | - | 'X-TEST: 123ABC' | - And a Pact file for interaction 1 is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Headers had differences" error - - Scenario: Verifies the response body (positive case) - Given a provider is started that returns the response from interaction 2, with the following changes: - | body | - | JSON: { "one": "100", "two": "b" } | - And a Pact file for interaction 2 is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Verifies the response body (negative case) - Given a provider is started that returns the response from interaction 2, with the following changes: - | body | - | JSON: { "one": 100, "two": "b" } | - And a Pact file for interaction 2 is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Body had differences" error diff --git a/compatibility-suite/pact-compatibility-suite/features/V3/generators.feature b/compatibility-suite/pact-compatibility-suite/features/V3/generators.feature deleted file mode 100644 index 13aa7bb20..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V3/generators.feature +++ /dev/null @@ -1,71 +0,0 @@ -Feature: V3 era Generators - - Scenario: Supports a random integer generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | randomint-generator.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "integer" - - Scenario: Supports a random decimal generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | randomdec-generator.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "decimal number" - - Scenario: Supports a random hexadecimal generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | randomhex-generator.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "hexadecimal number" - - Scenario: Supports a random string generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | randomstr-generator.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "random string" - - Scenario: Supports a regex generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | randomregex-generator.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "string from the regex" - - Scenario: Supports a date generator - Given a request configured with the following generators: - | body | 
generators | - | file: basic.json | date-generator.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "date" - - Scenario: Supports a time generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | time-generator.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "time" - - Scenario: Supports a date-time generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | datetime-generator.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "date-time" - - Scenario: Supports a UUID generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | uuid-generator.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "UUID" - - Scenario: Supports a boolean generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | boolean-generator.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "boolean" diff --git a/compatibility-suite/pact-compatibility-suite/features/V3/http_consumer.feature b/compatibility-suite/pact-compatibility-suite/features/V3/http_consumer.feature deleted file mode 100644 index 69840074f..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V3/http_consumer.feature +++ /dev/null @@ -1,25 +0,0 @@ -@consumer -Feature: HTTP consumer - Supports V3 HTTP consumer interactions - - Scenario: Supports specifying multiple provider states - Given an integration is being defined for a consumer test - And a provider state "state one" is specified - And a provider state "state two" is specified - When the Pact file for the test is generated - Then the interaction in the Pact file will contain 2 provider states - And the interaction in the Pact file will contain provider state "state one" - And the interaction in the Pact file will contain provider state "state two" - - Scenario: Supports data for provider states - Given an integration is being defined for a consumer test - And a provider state "a user exists" is specified with the following data: - | username | name | age | - | "Test" | "Test Guy" | 66 | - When the Pact file for the test is generated - Then the interaction in the Pact file will contain 1 provider state - And the interaction in the Pact file will contain provider state "a user exists" - And the provider state "a user exists" in the Pact file will contain the following parameters: - | parameters | - | {"age":66,"name":"Test Guy","username":"Test"} | - diff --git a/compatibility-suite/pact-compatibility-suite/features/V3/http_generators.feature b/compatibility-suite/pact-compatibility-suite/features/V3/http_generators.feature deleted file mode 100644 index 9021f988d..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V3/http_generators.feature +++ /dev/null @@ -1,53 +0,0 @@ -Feature: V3 era Generators applied to HTTP parts - - Scenario: Supports using a generator with the request path - Given a request configured with the following generators: - | generators | - | JSON: { "path": { "type": "ProviderState", "expression": "/path/${id}" } } | - And the generator test mode is set as "Provider" - When the request is prepared for use with a 
"providerState" context: - | { "id": 1000 } | - Then the request "path" will be set as "/path/1000" - - Scenario: Supports using a generator with the request headers - Given a request configured with the following generators: - | generators | - | JSON: { "header": { "X-TEST": { "type": "RandomInt", "min": 1, "max": 10 } } } | - When the request is prepared for use - Then the request "header[X-TEST]" will match "\d+" - - Scenario: Supports using a generator with the request query parameters - Given a request configured with the following generators: - | generators | - | JSON: { "query": { "v1": { "type": "RandomInt", "min": 1, "max": 10 } } } | - When the request is prepared for use - Then the request "queryParameter[v1]" will match "\d+" - - Scenario: Supports using a generator with the request body - Given a request configured with the following generators: - | body | generators | - | file: basic.json | randomint-generator.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with an "integer" - - Scenario: Supports using a generator with the response status - Given a response configured with the following generators: - | generators | - | JSON: { "status": { "type": "RandomInt", "min": 201, "max": 599 } } | - When the response is prepared for use - Then the response "status" will not be "200" - Then the response "status" will match "\d+" - - Scenario: Supports using a generator with the response headers - Given a response configured with the following generators: - | generators | - | JSON: { "header": { "X-TEST": { "type": "RandomInt", "min": 1, "max": 10 } } } | - When the response is prepared for use - Then the response "header[X-TEST]" will match "\d+" - - Scenario: Supports using a generator with the response body - Given a response configured with the following generators: - | body | generators | - | file: basic.json | randomint-generator.json | - When the response is prepared for use - Then the body value for "$.one" will have been replaced with a "integer" diff --git a/compatibility-suite/pact-compatibility-suite/features/V3/http_matching.feature b/compatibility-suite/pact-compatibility-suite/features/V3/http_matching.feature deleted file mode 100644 index 8f34705d3..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V3/http_matching.feature +++ /dev/null @@ -1,45 +0,0 @@ -Feature: Matching HTTP parts (request or response) - - Scenario: Comparing content type headers which are equal - Given an expected request with a "content-type" header of "application/json" - And a request is received with a "content-type" header of "application/json" - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Comparing content type headers where they have the same charset - Given an expected request with a "content-type" header of "application/json;charset=UTF-8" - And a request is received with a "content-type" header of "application/json;charset=utf-8" - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Comparing content type headers where the actual has a charset - Given an expected request with a "content-type" header of "application/json" - And a request is received with a "content-type" header of "application/json;charset=UTF-8" - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Comparing content type headers where the actual is missing a charset - Given an expected request with a 
"content-type" header of "application/json;charset=UTF-8" - And a request is received with a "content-type" header of "application/json" - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "content-type" -> "Expected header 'content-type' to have value 'application/json;\s*charset=UTF-8' but was 'application/json'" - - Scenario: Comparing content type headers where the actual has a different charset - Given an expected request with a "content-type" header of "application/json;charset=UTF-16" - And a request is received with a "content-type" header of "application/json;charset=UTF-8" - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "content-type" -> "Expected header 'content-type' to have value 'application/json;\s*charset=UTF-16' but was 'application/json;\s*charset=UTF-8'" - - Scenario: Comparing accept headers where the actual has additional parameters - Given an expected request with an "accept" header of "text/html, application/xhtml+xml, application/xml, image/webp, */*" - And a request is received with an "accept" header of "text/html, application/xhtml+xml, application/xml;q=0.9, image/webp, */*;q=0.8" - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Comparing accept headers where the actual has is missing a value - Given an expected request with an "accept" header of "text/html, application/xhtml+xml, application/xml, image/webp, */*" - And a request is received with an "accept" header of "text/html, application/xml;q=0.9, image/webp, */*;q=0.8" - When the request is compared to the expected one - Then the comparison should NOT be OK diff --git a/compatibility-suite/pact-compatibility-suite/features/V3/http_provider.feature b/compatibility-suite/pact-compatibility-suite/features/V3/http_provider.feature deleted file mode 100644 index 7033e1c3e..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V3/http_provider.feature +++ /dev/null @@ -1,39 +0,0 @@ -@provider -Feature: HTTP provider - Supports verifying a HTTP provider using V3 features - - Background: - Given the following HTTP interactions have been defined: - | No | method | path | response | response headers | response content | response body | response matching rules | - | 1 | GET | /one | 200 | 'X-TEST: 1' | application/json | file: basic.json | regex-matcher-header-v2.json | - - Scenario: Verifying an interaction with multiple defined provider states - Given a provider is started that returns the response from interaction 1 - And a provider state callback is configured - And a Pact file for interaction 1 is to be verified with the following provider states defined: - | State Name | - | State One | - | State Two | - When the verification is run - Then the provider state callback will be called before the verification is run - And the provider state callback will receive a setup call with "State One" as the provider state parameter - And the provider state callback will receive a setup call with "State Two" as the provider state parameter - And the provider state callback will be called after the verification is run - And the provider state callback will receive a teardown call "State One" as the provider state parameter - And the provider state callback will receive a teardown call "State Two" as the provider state parameter - - Scenario: Verifying an interaction with 
a provider state with parameters - Given a provider is started that returns the response from interaction 1 - And a provider state callback is configured - And a Pact file for interaction 1 is to be verified with the following provider states defined: - | State Name | Parameters | - | A user exists | { "name": "Bob", "age": 22 } | - When the verification is run - Then the provider state callback will be called before the verification is run - And the provider state callback will receive a setup call with "A user exists" and the following parameters: - | name | age | - | "Bob" | 22 | - And the provider state callback will be called after the verification is run - And the provider state callback will receive a teardown call "A user exists" and the following parameters: - | name | age | - | "Bob" | 22 | diff --git a/compatibility-suite/pact-compatibility-suite/features/V3/matching_rules.feature b/compatibility-suite/pact-compatibility-suite/features/V3/matching_rules.feature deleted file mode 100644 index 7dd079d77..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V3/matching_rules.feature +++ /dev/null @@ -1,236 +0,0 @@ -Feature: V3 era Matching Rules - - Scenario: Supports an equality matcher to reset cascading rules - Given an expected request configured with the following: - | body | matching rules | - | file: 3-level.json | equality-matcher-reset-v3.json | - And a request is received with the following: - | body | - | JSON: { "one": { "a": { "ids": [100], "status": "Lovely" } }, "two": [ { "ids": [1], "status": "BAD" } ] } | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.one.a.status" -> "Expected 'Lovely' (String) to be equal to 'OK' (String)" - - Scenario: Supports an include matcher (positive case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | include-matcher-v3.json | - And a request is received with the following: - | body | - | JSON: { "one": "cat", "two": "b" } | - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Supports an include matcher (negative case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | include-matcher-v3.json | - And a request is received with the following: - | body | - | JSON: { "one": "dog", "two": "b" } | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.one" -> "Expected 'dog' to include 'a'" - - Scenario: Supports a minmax type matcher (positive case) - Given an expected request configured with the following: - | body | matching rules | - | file: 3-level.json | minmax-type-matcher-v3.json | - And a request is received with the following: - | body | - | JSON: { "one": { "a": { "ids": [100], "status": "OK" } }, "two": [ { "ids": [1,2,3], "status": "BAD" } ] } | - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Supports a minmax type matcher (negative case) - Given an expected request configured with the following: - | body | matching rules | - | file: 3-level.json | minmax-type-matcher-v3.json | - And a request is received with the following: - | body | - | JSON: { "one": { "a": { "ids": [], "status": "OK" } }, "two": [ { "ids": [1,2,3,4,5], "status": "BAD" } ] } | - When the request is compared to the expected one - 
Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.one.a.ids" -> "Expected [] (size 0) to have minimum size of 1" - And the mismatches will contain a mismatch with error "$.two[0].ids" -> "Expected [1, 2, 3, 4, 5] (size 5) to have maximum size of 4" - - Scenario: Supports a number type matcher (positive case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | number-type-matcher-v3.json | - And the following requests are received: - | body | desc | - | JSON: { "one": 100, "two": "b" } | Integer number | - | JSON: { "one": 100.01, "two": "b" } | floating point number | - When the requests are compared to the expected one - Then the comparison should be OK - - Scenario: Supports a number type matcher (negative case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | number-type-matcher-v3.json | - And the following requests are received: - | body | desc | - | JSON: { "one": true, "two": "b" } | Boolean | - | JSON: { "one": "100X01", "two": "b" } | String | - When the requests are compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.one" -> "Expected true (Boolean) to be a number" - And the mismatches will contain a mismatch with error "$.one" -> "Expected '100X01' (String) to be a number" - - Scenario: Supports an integer type matcher, no digits after the decimal point (positive case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | integer-type-matcher-v3.json | - And the following requests are received: - | body | desc | - | JSON: { "one": 100, "two": "b" } | Integer number | - | JSON: { "one": "100", "two": "b" } | String representation of an integer | - When the requests are compared to the expected one - Then the comparison should be OK - - Scenario: Supports an integer type matcher, no digits after the decimal point (negative case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | integer-type-matcher-v3.json | - And the following requests are received: - | body | desc | - | JSON: { "one": [], "two": "b" } | Array | - | JSON: { "one": 100.1, "two": "b" } | Floating point number | - | JSON: { "one": "100X01", "two": "b" } | Not a string representation of an integer | - When the requests are compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.one" -> "Expected [] (Array) to be an integer" - And the mismatches will contain a mismatch with error "$.one" -> "Expected 100.1 (Decimal) to be an integer" - And the mismatches will contain a mismatch with error "$.one" -> "Expected '100X01' (String) to be an integer" - - Scenario: Supports a decimal type matcher, must have significant digits after the decimal point (positive case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | decimal-type-matcher-v3.json | - And the following requests are received: - | body | desc | - | JSON: { "one": 100.1234, "two": "b" } | Floating point number | - | JSON: { "one": "100.1234", "two": "b" } | String representation of a floating point number | - When the requests are compared to the expected one - Then the comparison should be OK - - Scenario: Supports a decimal type matcher, must have significant digits after the decimal 
point (negative case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | decimal-type-matcher-v3.json | - And the following requests are received: - | body | desc | - | JSON: { "one": null, "two": "b" } | Null | - | JSON: { "one": 100, "two": "b" } | Integer number | - | JSON: { "one": "100X01", "two": "b" } | Not a string representation of a decimal number | - When the requests are compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.one" -> "Expected null (Null) to be a decimal number" - And the mismatches will contain a mismatch with error "$.one" -> "Expected 100 (Integer) to be a decimal number" - And the mismatches will contain a mismatch with error "$.one" -> "Expected '100X01' (String) to be a decimal number" - - Scenario: Supports a null matcher (positive case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | null-matcher-v3.json | - And a request is received with the following: - | body | - | JSON: { "one": null, "two": "b" } | - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Supports a null matcher (negative case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | null-matcher-v3.json | - And a request is received with the following: - | body | - | JSON: { "one": "", "two": "b" } | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.one" -> "Expected '' (String) to be a null value" - - Scenario: Supports a Date and Time matcher (positive case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | date-matcher-v3.json | - And a request is received with the following: - | body | - | JSON: { "one": "2023-07-19", "two": "b" } | - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Supports a Date and Time matcher (negative case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | date-matcher-v3.json | - And a request is received with the following: - | body | - | JSON: { "one": "23/07/19", "two": "b" } | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.one" -> "Expected '23/07/19' to match a date pattern of 'yyyy-MM-dd'" - - Scenario: Supports a Boolean matcher (positive case) - Given an expected request configured with the following: - | body | matching rules | - | JSON: { "one": true, "two": "b" } | boolean-matcher-v3.json | - And a request is received with the following: - | body | - | JSON: { "one": false, "two": "b" } | - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Supports a Boolean matcher (negative case) - Given an expected request configured with the following: - | body | matching rules | - | JSON: { "one": true, "two": "b" } | boolean-matcher-v3.json | - And a request is received with the following: - | body | - | JSON: { "one": "", "two": "b" } | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.one" -> "Expected '' (String) to match a boolean" - - Scenario: Supports a 
ContentType matcher (positive case) - Given an expected request configured with the following: - | content type | body | matching rules | - | application/octet-stream | file: rat.jpg | contenttype-matcher-v3.json | - And a request is received with the following: - | content type | body | - | application/octet-stream | file: spider.jpg | - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Supports a ContentType matcher (negative case) - Given an expected request configured with the following: - | content type | body | matching rules | - | application/octet-stream | file: rat.jpg | contenttype-matcher-v3.json | - And a request is received with the following: - | content type | body | - | application/octet-stream | file: sample.pdf | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$" -> "Expected binary contents to have content type 'image/jpeg' but detected contents was 'application/pdf'" - - Scenario: Supports a Values matcher (positive case, ignores missing and additional keys) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | values-matcher-v3.json | - And a request is received with the following: - | body | - | JSON: { "one": "", "three": "b", "four": "c", "five": "100" } | - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Supports a Values matcher (negative case, final type is wrong) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | values-matcher-v3.json | - And a request is received with the following: - | body | - | JSON: { "one": "", "two": "b", "three": "c", "four": 100 } | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.four" -> "Expected 100 (Integer) to be the same type as 'a' (String)" diff --git a/compatibility-suite/pact-compatibility-suite/features/V3/message_consumer.feature b/compatibility-suite/pact-compatibility-suite/features/V3/message_consumer.feature deleted file mode 100644 index 319f6aea0..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V3/message_consumer.feature +++ /dev/null @@ -1,78 +0,0 @@ -@message -Feature: Message consumer - Supports V3 message consumer interactions - - Scenario: When all messages are successfully processed - Given a message integration is being defined for a consumer test - And the message payload contains the "basic" JSON document - When the message is successfully processed - Then the received message payload will contain the "basic" JSON document - And the received message content type will be "application/json" - And the consumer test will have passed - And a Pact file for the message interaction will have been written - And the pact file will contain 1 message interaction - And the first message in the pact file will contain the "basic.json" document - And the first message in the pact file content type will be "application/json" - - Scenario: When not all messages are successfully processed - Given a message integration is being defined for a consumer test - And the message payload contains the "basic" JSON document - When the message is NOT successfully processed with a "Test failed" exception - Then the consumer test will have failed - And the consumer test error will be "Test failed" - And a Pact 
file for the message interaction will NOT have been written - - Scenario: Supports arbitrary message metadata - Given a message integration is being defined for a consumer test - And the message payload contains the "basic" JSON document - And the message contains the following metadata: - | key | value | - | Origin | Some Text | - | TagData | JSON: { "ID": "sjhdjkshsdjh", "weight": 100.5 } | - When the message is successfully processed - Then the received message metadata will contain "Origin" == "Some Text" - And the received message metadata will contain "TagData" == "JSON: { \"ID\": \"sjhdjkshsdjh\", \"weight\": 100.5 }" - And a Pact file for the message interaction will have been written - And the first message in the pact file will contain the message metadata "Origin" == "Some Text" - And the first message in the pact file will contain the message metadata "TagData" == "JSON: { \"ID\": \"sjhdjkshsdjh\", \"weight\": 100.5 }" - - Scenario: Supports specifying provider states - Given a message integration is being defined for a consumer test - And a provider state "state one" for the message is specified - And a provider state "state two" for the message is specified - And a message is defined - When the message is successfully processed - Then a Pact file for the message interaction will have been written - And the first message in the pact file will contain 2 provider states - And the first message in the Pact file will contain provider state "state one" - And the first message in the Pact file will contain provider state "state two" - - Scenario: Supports data for provider states - Given a message integration is being defined for a consumer test - And a provider state "a user exists" for the message is specified with the following data: - | username | name | age | - | "Test" | "Test Guy" | 66 | - And a message is defined - When the message is successfully processed - Then a Pact file for the message interaction will have been written - And the first message in the pact file will contain 1 provider state - And the provider state "a user exists" for the message will contain the following parameters: - | parameters | - | {"age":66,"name":"Test Guy","username":"Test"} | - - Scenario: Supports the use of generators with the message body - Given a message integration is being defined for a consumer test - And the message is configured with the following: - | body | generators | - | file: basic.json | randomint-generator.json | - When the message is successfully processed - Then the message contents for "$.one" will have been replaced with an "integer" - - Scenario: Supports the use of generators with message metadata - Given a message integration is being defined for a consumer test - And the message is configured with the following: - | generators | metadata | - | JSON: { "metadata": { "ID": { "type": "RandomInt", "min": 0, "max": 1000 } } } | { "ID": "sjhdjkshsdjh", "weight": 100.5 } | - When the message is successfully processed - Then the received message metadata will contain "weight" == "JSON: 100.5" - And the received message metadata will contain "ID" replaced with an "integer" diff --git a/compatibility-suite/pact-compatibility-suite/features/V3/message_provider.feature b/compatibility-suite/pact-compatibility-suite/features/V3/message_provider.feature deleted file mode 100644 index 5cebd8ce5..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V3/message_provider.feature +++ /dev/null @@ -1,147 +0,0 @@ -@message -Feature: Message provider - Supports verifying a 
V3 message Pacts - - Scenario: Verifying a simple message - Given a provider is started that can generate the "basic" message with "file: basic.json" - And a Pact file for "basic":"file: basic.json" is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Verifying multiple Pact files - Given a provider is started that can generate the "basic" message with "file: basic.json" - And a provider is started that can generate the "xml" message with "file: xml-body.xml" - And a Pact file for "basic":"file: basic.json" is to be verified - And a Pact file for "xml":"file: xml-body.xml" is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Incorrect message is generated by the provider - Given a provider is started that can generate the "json" message with "JSON: { \"one\": \"a\", \"two\": \"c\" }" - And a Pact file for "json":"file: basic.json" is to be verified - When the verification is run - Then the verification will NOT be successful - - Scenario: Verifying an interaction with a defined provider state - Given a provider is started that can generate the "basic" message with "file: basic.json" - And a provider state callback is configured - And a Pact file for "basic":"file: basic.json" is to be verified with provider state "state one" - When the verification is run - Then the provider state callback will be called before the verification is run - And the provider state callback will receive a setup call with "state one" as the provider state parameter - And the provider state callback will be called after the verification is run - And the provider state callback will receive a teardown call "state one" as the provider state parameter - - Scenario: Verifies the message metadata - Given a provider is started that can generate the "basic" message with "file: basic.json" and the following metadata: - | key | value | - | Origin | Some Text | - | TagData | JSON: { "ID": "sjhdjkshsdjh", "weight": 100.5 } | - And a Pact file for "basic":"file: basic.json" is to be verified with the following metadata: - | key | value | - | Origin | Some Text | - | TagData | JSON: { "ID": "100", "weight": 100.5 } | - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Metadata had differences" error - - Scenario: Message with plain text body (positive case) - Given a provider is started that can generate the "basic" message with "Hello World" - And a Pact file for "basic":"Hello World" is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Message with plain text body (negative case) - Given a provider is started that can generate the "basic" message with "Hello World" - And a Pact file for "basic":"Hello Jupiter" is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Body had differences" error - - Scenario: Message with JSON body (positive case) - Given a provider is started that can generate the "basic" message with "file: basic.json" - And a Pact file for "basic":"file: basic.json" is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Message with JSON body (negative case) - Given a provider is started that can generate the "json" message with "JSON: { \"one\": \"a\", \"two\": \"c\" }" - And a Pact file for "json":"file: basic.json" is to be verified - When 
the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Body had differences" error - - Scenario: Message with XML body (positive case) - Given a provider is started that can generate the "xml" message with "file: xml-body.xml" - And a Pact file for "xml":"file: xml-body.xml" is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Message with XML body (negative case) - Given a provider is started that can generate the "xml" message with "file: xml-body.xml" - And a Pact file for "xml":"file: xml2-body.xml" is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Body had differences" error - - Scenario: Message with binary body (positive case) - Given a provider is started that can generate the "image" message with "file: rat.jpg" - And a Pact file for "image":"file: rat.jpg" is to be verified - When the verification is run - Then the verification will be successful - - Scenario: Message with binary body (negative case) - Given a provider is started that can generate the "image" message with "file: rat.jpg" - And a Pact file for "image":"file: spider.jpg" is to be verified - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Body had differences" error - - Scenario: Supports matching rules for the message metadata (positive case) - Given a provider is started that can generate the "basic" message with "file: basic.json" and the following metadata: - | key | value | - | Origin | AAA-123 | - | TagData | JSON: { "ID": "123", "weight": 100.5 } | - And a Pact file for "basic" is to be verified with the following: - | body | file: basic.json | - | matching rules | regex-matcher-metadata.json | - | metadata | Origin=AXP-1000; TagData=JSON: { "ID": "123", "weight": 100.5 } | - When the verification is run - Then the verification will be successful - - Scenario: Supports matching rules for the message metadata (negative case) - Given a provider is started that can generate the "basic" message with "file: basic.json" and the following metadata: - | key | value | - | Origin | AAAB-123 | - | TagData | JSON: { "ID": "123", "weight": 100.5 } | - And a Pact file for "basic" is to be verified with the following: - | body | file: basic.json | - | matching rules | regex-matcher-metadata.json | - | metadata | Origin=AXP-1000; TagData=JSON: { "ID": "123", "weight": 100.5 } | - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Metadata had differences" error - - Scenario: Supports matching rules for the message body (positive case) - Given a provider is started that can generate the "basic" message with "file: basic2.json" - And a Pact file for "basic" is to be verified with the following: - | body | file: basic.json | - | matching rules | include-matcher-v3.json | - When the verification is run - Then the verification will be successful - - Scenario: Supports matching rules for the message body (negative case) - Given a provider is started that can generate the "basic" message with "file: basic3.json" - And a Pact file for "basic" is to be verified with the following: - | body | file: basic.json | - | matching rules | include-matcher-v3.json | - When the verification is run - Then the verification will NOT be successful - And the verification results will contain a "Body 
had differences" error - - @wip - Scenario: Supports messages with body formatted for the Kafka schema registry - Given a provider is started that can generate the "kafka" message with "file: kafka-body.xml" - And a Pact file for "kafka":"file: kafka-expected-body.xml" is to be verified - When the verification is run - Then the verification will be successful diff --git a/compatibility-suite/pact-compatibility-suite/features/V4/generators.feature b/compatibility-suite/pact-compatibility-suite/features/V4/generators.feature deleted file mode 100644 index d9abdbf54..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V4/generators.feature +++ /dev/null @@ -1,47 +0,0 @@ -Feature: V4 era Generators - - Scenario: Supports a Provider State generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | providerstate-generator.json | - And the generator test mode is set as "Provider" - When the request is prepared for use with a "providerState" context: - | { "id": 1000 } | - Then the body value for "$.one" will have been replaced with "1000" - - Scenario: Supports a Mock server URL generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | mockserver-generator.json | - And the generator test mode is set as "Consumer" - When the request is prepared for use with a "mockServer" context: - | { "href": "http://somewhere.world" } | - Then the body value for "$.one" will have been replaced with "http://somewhere.world/a" - - Scenario: Supports a simple UUID generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | uuid-generator-simple.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "simple UUID" - - Scenario: Supports a lower-case-hyphenated UUID generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | uuid-generator-lower-case-hyphenated.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "lower-case-hyphenated UUID" - - Scenario: Supports a upper-case-hyphenated UUID generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | uuid-generator-upper-case-hyphenated.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "upper-case-hyphenated UUID" - - Scenario: Supports a URN UUID generator - Given a request configured with the following generators: - | body | generators | - | file: basic.json | uuid-generator-urn.json | - When the request is prepared for use - Then the body value for "$.one" will have been replaced with a "URN UUID" diff --git a/compatibility-suite/pact-compatibility-suite/features/V4/http_consumer.feature b/compatibility-suite/pact-compatibility-suite/features/V4/http_consumer.feature deleted file mode 100644 index 79f85fabe..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V4/http_consumer.feature +++ /dev/null @@ -1,26 +0,0 @@ -@consumer -Feature: HTTP consumer - Supports V4 HTTP consumer interactions - - Scenario: Sets the type for the interaction - Given an HTTP interaction is being defined for a consumer test - When the Pact file for the test is generated - Then the first interaction in the Pact file will have a type of "Synchronous/HTTP" - - Scenario: Supports specifying a key for 
the interaction - Given an HTTP interaction is being defined for a consumer test - And a key of "123ABC" is specified for the HTTP interaction - When the Pact file for the test is generated - Then the first interaction in the Pact file will have "key" = '"123ABC"' - - Scenario: Supports specifying the interaction is pending - Given an HTTP interaction is being defined for a consumer test - And the HTTP interaction is marked as pending - When the Pact file for the test is generated - Then the first interaction in the Pact file will have "pending" = 'true' - - Scenario: Supports adding comments - Given an HTTP interaction is being defined for a consumer test - And a comment "this is a comment" is added to the HTTP interaction - When the Pact file for the test is generated - Then the first interaction in the Pact file will have "comments" = '{"text":["this is a comment"]}' diff --git a/compatibility-suite/pact-compatibility-suite/features/V4/http_provider.feature b/compatibility-suite/pact-compatibility-suite/features/V4/http_provider.feature deleted file mode 100644 index be3d1ff53..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V4/http_provider.feature +++ /dev/null @@ -1,29 +0,0 @@ -@provider -Feature: HTTP provider - Supports verifying a HTTP provider using V4 features - - Background: - Given the following HTTP interactions have been defined: - | No | method | path | query | headers | body | response | response headers | response content | response body | - | 1 | GET | /basic | | | | 200 | | application/json | file: basic.json | - - Scenario: Verifying a pending HTTP interaction - Given a provider is started that returns the response from interaction 1, with the following changes: - | body | - | file: basic2.json | - And a Pact file for interaction 1 is to be verified, but is marked pending - When the verification is run - Then the verification will be successful - And there will be a pending "Body had differences" error - - Scenario: Verifying a HTTP interaction with comments - Given a provider is started that returns the response from interaction 1 - And a Pact file for interaction 1 is to be verified with the following comments: - | comment | type | - | comment one | text | - | comment two | text | - | compatibility-suite | testname | - When the verification is run - Then the comment "comment one" will have been printed to the console - And the comment "comment two" will have been printed to the console - And the "compatibility-suite" will displayed as the original test name diff --git a/compatibility-suite/pact-compatibility-suite/features/V4/matching_rules.feature b/compatibility-suite/pact-compatibility-suite/features/V4/matching_rules.feature deleted file mode 100644 index a20ce3e04..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V4/matching_rules.feature +++ /dev/null @@ -1,157 +0,0 @@ -Feature: V4 era Matching Rules - - Scenario: Supports a status code matcher (positive case) - Given an expected response configured with the following: - | status | matching rules | - | 200 | statuscode-matcher-v4.json | - And a status 299 response is received - When the response is compared to the expected one - Then the response comparison should be OK - - Scenario: Supports a status code matcher (negative case) - Given an expected response configured with the following: - | status | matching rules | - | 200 | statuscode-matcher-v4.json | - And a status 400 response is received - When the response is compared to the expected one - Then the response 
comparison should NOT be OK - And the response mismatches will contain a "status" mismatch with error "Expected status code 400 to be a Successful response (200–299)" - - Scenario: Supports a not empty matcher (positive case) - Given an expected request configured with the following: - | body | matching rules | - | JSON: { "one": "", "two": ["b"] } | notempty-matcher-v4.json | - And a request is received with the following: - | body | - | JSON: { "one": "cat", "two": ["rat"] } | - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Supports a not empty matcher with binary data (positive case) - Given an expected request configured with the following: - | body | matching rules | - | file: rat.jpg | notempty2-matcher-v4.json | - And a request is received with the following: - | body | - | file: spider.jpg | - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Supports a not empty matcher (negative case) - Given an expected request configured with the following: - | body | matching rules | - | JSON: { "one": "a", "two": ["b"] } | notempty-matcher-v4.json | - And a request is received with the following: - | body | - | JSON: { "one": "", "two": [] } | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.one" -> "Expected '' (String) to not be empty" - And the mismatches will contain a mismatch with error "$.two" -> "Expected [] (Array) to not be empty" - - Scenario: Supports a not empty matcher (negative case 2, types are different) - Given an expected request configured with the following: - | body | matching rules | - | JSON: { "one": "a", "two": ["b"] } | notempty-matcher-v4.json | - And a request is received with the following: - | body | - | JSON: { "one": "a", "two": "b" } | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.two" -> "Type mismatch: Expected 'b' (String) to be the same type as [\"b\"] (Array)" - - Scenario: Supports a not empty matcher with binary data (negative case) - Given an expected request configured with the following: - | body | matching rules | - | file: rat.jpg | notempty2-matcher-v4.json | - And a request is received with the following: - | content type | body | - | image/jpeg | EMPTY | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$" -> "Expected [] (0 bytes) to not be empty" - - Scenario: Supports a semver matcher (positive case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | semver-matcher-v4.json | - And a request is received with the following: - | body | - | JSON: { "one": "1.0.0", "two": "2.0.0" } | - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Supports a semver matcher (negative case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | semver-matcher-v4.json | - And a request is received with the following: - | body | - | JSON: { "one": "1.0", "two": "1.0abc" } | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.one" -> "'1.0' is not a valid semantic version" - And the mismatches will contain a mismatch 
with error "$.two" -> "'1.0abc' is not a valid semantic version" - - Scenario: Supports an EachKey matcher (positive case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | eachkey-matcher-v4.json | - And a request is received with the following: - | body | - | JSON: { "one": "a", "two": "b", "three": "c", "four": "d" } | - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Supports an EachKey matcher (negative case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | eachkey-matcher-v4.json | - And a request is received with the following: - | body | - | JSON: { "one": "a", "two": "b", "three": "c", "100": "d" } | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$[100]" -> "Expected '100' to match '[a-z]+" - - Scenario: Supports an EachValue matcher (positive case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | eachvalue-matcher-v4.json | - And a request is received with the following: - | body | - | JSON: { "one": "a", "three": "b", "four": "c", "five": "d" } | - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Supports a EachValue matcher (negative case) - Given an expected request configured with the following: - | body | matching rules | - | file: basic.json | eachvalue-matcher-v4.json | - And a request is received with the following: - | body | - | JSON: { "one": "", "two": "b", "three": "c", "four": "100" } | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.four" -> "Expected '100' to match '[a-z]+" - - Scenario: Supports an ArrayContains matcher (positive case) - Given an expected request configured with the following: - | content type | body | matching rules | - | application/vnd.siren+json | file: siren.json | arraycontains-matcher-v4.json | - And a request is received with the following: - | content type | body | - | application/vnd.siren+json | file: siren2.json | - When the request is compared to the expected one - Then the comparison should be OK - - Scenario: Supports a ArrayContains matcher (negative case) - Given an expected request configured with the following: - | content type | body | matching rules | - | application/vnd.siren+json | file: siren.json | arraycontains-matcher-v4.json | - And a request is received with the following: - | content type | body | - | application/vnd.siren+json | file: siren3.json | - When the request is compared to the expected one - Then the comparison should NOT be OK - And the mismatches will contain a mismatch with error "$.actions" -> "Variant at index 1 ({\"href\":\"http://api.x.io/orders/42/items\",\"method\":\"DELETE\",\"name\":\"delete-item\",\"title\":\"Delete Item\"}) was not found in the actual list" diff --git a/compatibility-suite/pact-compatibility-suite/features/V4/message_consumer.feature b/compatibility-suite/pact-compatibility-suite/features/V4/message_consumer.feature deleted file mode 100644 index eee2a10ac..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V4/message_consumer.feature +++ /dev/null @@ -1,26 +0,0 @@ -@consumer @message -Feature: Message consumer - Supports V4 async message consumer interactions - - Scenario: Sets the 
type for the interaction - Given a message interaction is being defined for a consumer test - When the Pact file for the test is generated - Then the first interaction in the Pact file will have a type of "Asynchronous/Messages" - - Scenario: Supports specifying a key for the interaction - Given a message interaction is being defined for a consumer test - And a key of "123ABC" is specified for the message interaction - When the Pact file for the test is generated - Then the first interaction in the Pact file will have "key" = '"123ABC"' - - Scenario: Supports specifying the interaction is pending - Given a message interaction is being defined for a consumer test - And the message interaction is marked as pending - When the Pact file for the test is generated - Then the first interaction in the Pact file will have "pending" = 'true' - - Scenario: Supports adding comments - Given a message interaction is being defined for a consumer test - And a comment "this is a comment" is added to the message interaction - When the Pact file for the test is generated - Then the first interaction in the Pact file will have "comments" = '{"text":["this is a comment"]}' diff --git a/compatibility-suite/pact-compatibility-suite/features/V4/message_provider.feature b/compatibility-suite/pact-compatibility-suite/features/V4/message_provider.feature deleted file mode 100644 index c0b152187..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V4/message_provider.feature +++ /dev/null @@ -1,22 +0,0 @@ -@provider @message -Feature: Message provider - Supports verifying a async message provider using V4 features - - Scenario: Verifying a pending message interaction - Given a provider is started that can generate the "basic" message with "file: basic2.json" - And a Pact file for "basic":"file: basic.json" is to be verified, but is marked pending - When the verification is run - Then the verification will be successful - And there will be a pending "Body had differences" error - - Scenario: Verifying a message interaction with comments - Given a provider is started that can generate the "basic" message with "file: basic.json" - And a Pact file for "basic":"file: basic.json" is to be verified with the following comments: - | comment | type | - | comment one | text | - | comment two | text | - | compatibility-suite | testname | - When the verification is run - Then the comment "comment one" will have been printed to the console - And the comment "comment two" will have been printed to the console - And the "compatibility-suite" will displayed as the original test name diff --git a/compatibility-suite/pact-compatibility-suite/features/V4/synchronous_message_consumer.feature b/compatibility-suite/pact-compatibility-suite/features/V4/synchronous_message_consumer.feature deleted file mode 100644 index 7cf7f0a80..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V4/synchronous_message_consumer.feature +++ /dev/null @@ -1,113 +0,0 @@ -@message @SynchronousMessage -Feature: Synchronous Message consumer - Supports V4 synchronous message consumer interactions - - Scenario: Sets the type for the interaction - Given a synchronous message interaction is being defined for a consumer test - When the Pact file for the test is generated - Then the first interaction in the Pact file will have a type of "Synchronous/Messages" - - Scenario: Supports specifying a key for the interaction - Given a synchronous message interaction is being defined for a consumer test - And a key of "123ABC" is specified 
for the synchronous message interaction - When the Pact file for the test is generated - Then the first interaction in the Pact file will have "key" = '"123ABC"' - - Scenario: Supports specifying the interaction is pending - Given a synchronous message interaction is being defined for a consumer test - And the synchronous message interaction is marked as pending - When the Pact file for the test is generated - Then the first interaction in the Pact file will have "pending" = 'true' - - Scenario: Supports adding comments - Given a synchronous message interaction is being defined for a consumer test - And a comment "this is a comment" is added to the synchronous message interaction - When the Pact file for the test is generated - Then the first interaction in the Pact file will have "comments" = '{"text":["this is a comment"]}' - - Scenario: When all messages are successfully processed - Given a synchronous message interaction is being defined for a consumer test - And the message request payload contains the "basic" JSON document - And the message response payload contains the "file: xml-body.xml" document - When the message is successfully processed - Then the received message payload will contain the "file: xml-body.xml" document - And the received message content type will be "application/xml" - And the consumer test will have passed - And a Pact file for the message interaction will have been written - And the pact file will contain 1 interaction - And the first interaction in the pact file will contain the "file: basic.json" document as the request - And the first interaction in the pact file request content type will be "application/json" - And the first interaction in the pact file will contain the "file: xml-body.xml" document as a response - And the first interaction in the pact file response content type will be "application/xml" - - Scenario: Supports multiple responses to a request message - Given a synchronous message interaction is being defined for a consumer test - And the message response payload contains the "file: basic.json" document - And the message response payload contains the "file: xml-body.xml" document - When the Pact file for the test is generated - Then the first interaction in the pact file will contain 2 response messages - And the first interaction in the pact file will contain the "file: basic.json" document as the first response message - And the first interaction in the pact file will contain the "file: xml-body.xml" document as the second response message - - Scenario: Supports arbitrary message metadata - Given a synchronous message interaction is being defined for a consumer test - And the message request contains the following metadata: - | key | value | - | Origin | Some Text | - | TagData | JSON: { "ID": "sjhdjkshsdjh", "weight": 100.5 } | - When the message is successfully processed - Then the received message request metadata will contain "Origin" == "Some Text" - And the received message request metadata will contain "TagData" == "JSON: { \"ID\": \"sjhdjkshsdjh\", \"weight\": 100.5 }" - And a Pact file for the message interaction will have been written - And the first message in the pact file will contain the request message metadata "Origin" == "Some Text" - And the first message in the pact file will contain the request message metadata "TagData" == "JSON: { \"ID\": \"sjhdjkshsdjh\", \"weight\": 100.5 }" - - Scenario: Supports specifying provider states - Given a synchronous message interaction is being defined for a consumer test - And a 
provider state "state one" for the synchronous message is specified - And a provider state "state two" for the synchronous message is specified - When the message is successfully processed - Then a Pact file for the message interaction will have been written - And the first message in the pact file will contain 2 provider states - And the first message in the Pact file will contain provider state "state one" - And the first message in the Pact file will contain provider state "state two" - - Scenario: Supports data for provider states - Given a synchronous message interaction is being defined for a consumer test - And a provider state "a user exists" for the synchronous message is specified with the following data: - | username | name | age | - | "Test" | "Test Guy" | 66 | - When the message is successfully processed - Then a Pact file for the message interaction will have been written - And the first message in the pact file will contain 1 provider state - And the provider state "a user exists" for the message will contain the following parameters: - | parameters | - | {"age":66,"name":"Test Guy","username":"Test"} | - - Scenario: Supports the use of generators with the message bodies - Given a synchronous message interaction is being defined for a consumer test - And the message request is configured with the following: - | body | generators | - | file: basic.json | randomint-generator.json | - And the message response is configured with the following: - | body | generators | - | file: basic.json | randomint-generator.json | - When the message is successfully processed - Then a Pact file for the message interaction will have been written - And the message request contents for "$.one" will have been replaced with an "integer" - And the message response contents for "$.one" will have been replaced with an "integer" - - Scenario: Supports the use of generators with message metadata - Given a synchronous message interaction is being defined for a consumer test - And the message request is configured with the following: - | generators | metadata | - | JSON: { "metadata": { "ID": { "type": "RandomInt", "min": 0, "max": 1000 } } } | { "ID": "sjhdjkshsdjh", "weight": 100.5 } | - And the message response is configured with the following: - | generators | metadata | - | JSON: { "metadata": { "ID": { "type": "RandomInt", "min": 0, "max": 1000 } } } | { "ID": "sjhdjkshsdjh", "weight": 100.5 } | - When the message is successfully processed - Then a Pact file for the message interaction will have been written - And the received message request metadata will contain "weight" == "JSON: 100.5" - And the received message request metadata will contain "ID" replaced with an "integer" - And the received message response metadata will contain "weight" == "JSON: 100.5" - And the received message response metadata will contain "ID" replaced with an "integer" diff --git a/compatibility-suite/pact-compatibility-suite/features/V4/v4.feature b/compatibility-suite/pact-compatibility-suite/features/V4/v4.feature deleted file mode 100644 index 0f0b29b1d..000000000 --- a/compatibility-suite/pact-compatibility-suite/features/V4/v4.feature +++ /dev/null @@ -1,9 +0,0 @@ -Feature: General V4 features - Supports general V4 features - - Scenario: Supports different types of interactions in the Pact file - Given an HTTP interaction is being defined for a consumer test - And a message interaction is being defined for a consumer test - When the Pact file for the test is generated - Then there will be an interaction in the Pact 
file with a type of "Synchronous/HTTP" - And there will be an interaction in the Pact file with a type of "Asynchronous/Messages" diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/3-level.json b/compatibility-suite/pact-compatibility-suite/fixtures/3-level.json deleted file mode 100644 index e2c6759f3..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/3-level.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "one": { - "a": { - "ids": [ 1, 2, 3, 4], - "status": "OK" - } - }, - "two": [ - { - "ids": [1], - "status": "BAD" - } - ] -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/arraycontains-matcher-v4.json b/compatibility-suite/pact-compatibility-suite/fixtures/arraycontains-matcher-v4.json deleted file mode 100644 index 662b46484..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/arraycontains-matcher-v4.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "body": { - "$.actions": { - "combine": "AND", - "matchers": [ - { - "match": "arrayContains", - "variants": [ - { - "generators": {}, - "index": 0, - "rules": { - "$.name": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": "add\\-item" - } - ] - }, - "$.method": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": "POST" - } - ] - }, - "$.*": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - } - } - }, - { - "generators": { }, - "index": 1, - "rules": { - "$.name": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": "delete\\-item" - } - ] - }, - "$.method": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": "DELETE" - } - ] - }, - "$.*": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - } - } - } - ] - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/basic.json b/compatibility-suite/pact-compatibility-suite/fixtures/basic.json deleted file mode 100644 index 3435fea80..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/basic.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "one": "a", - "two": "b" -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/basic2.json b/compatibility-suite/pact-compatibility-suite/fixtures/basic2.json deleted file mode 100644 index 2cf090348..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/basic2.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "one": "cat", - "two": "b" -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/basic3.json b/compatibility-suite/pact-compatibility-suite/fixtures/basic3.json deleted file mode 100644 index be96b7a41..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/basic3.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "one": "dog", - "two": "b" -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/boolean-generator.json b/compatibility-suite/pact-compatibility-suite/fixtures/boolean-generator.json deleted file mode 100644 index 74dc72a2b..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/boolean-generator.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "RandomBoolean" } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/boolean-matcher-v3.json b/compatibility-suite/pact-compatibility-suite/fixtures/boolean-matcher-v3.json deleted file mode 100644 index c587379dd..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/boolean-matcher-v3.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "body": { - "$.one": { - 
"combine": "AND", - "matchers": [ - { - "match": "boolean" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/contenttype-matcher-v3.json b/compatibility-suite/pact-compatibility-suite/fixtures/contenttype-matcher-v3.json deleted file mode 100644 index f11a058fb..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/contenttype-matcher-v3.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "body": { - "$": { - "combine": "AND", - "matchers": [ - { - "match": "contentType", "value": "image/jpeg" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/date-generator.json b/compatibility-suite/pact-compatibility-suite/fixtures/date-generator.json deleted file mode 100644 index 3dcc27f92..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/date-generator.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "Date" } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/date-matcher-v3.json b/compatibility-suite/pact-compatibility-suite/fixtures/date-matcher-v3.json deleted file mode 100644 index a6245715f..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/date-matcher-v3.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "body": { - "$.one": { - "combine": "AND", - "matchers": [ - { - "match": "date", - "format": "yyyy-MM-dd" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/datetime-generator.json b/compatibility-suite/pact-compatibility-suite/fixtures/datetime-generator.json deleted file mode 100644 index 663f9c603..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/datetime-generator.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "DateTime" } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/decimal-type-matcher-v3.json b/compatibility-suite/pact-compatibility-suite/fixtures/decimal-type-matcher-v3.json deleted file mode 100644 index b1ddc4180..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/decimal-type-matcher-v3.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "body": { - "$.one": { - "combine": "AND", - "matchers": [ - { - "match": "decimal" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/eachkey-matcher-v4.json b/compatibility-suite/pact-compatibility-suite/fixtures/eachkey-matcher-v4.json deleted file mode 100644 index 4a0a3c9a7..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/eachkey-matcher-v4.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "body": { - "$": { - "combine": "AND", - "matchers": [ - { - "match": "eachKey", - "rules": [ - { - "match": "regex", - "regex": "[a-z]+" - } - ], - "value": "one" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/eachvalue-matcher-v4.json b/compatibility-suite/pact-compatibility-suite/fixtures/eachvalue-matcher-v4.json deleted file mode 100644 index e56c933a6..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/eachvalue-matcher-v4.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "body": { - "$": { - "combine": "AND", - "matchers": [ - { - "match": "eachValue", - "rules": [ - { - "match": "regex", - "regex": "[a-z]+" - } - ], - "value": "one" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/equality-matcher-reset-v3.json b/compatibility-suite/pact-compatibility-suite/fixtures/equality-matcher-reset-v3.json deleted file mode 100644 index ce328d348..000000000 --- 
a/compatibility-suite/pact-compatibility-suite/fixtures/equality-matcher-reset-v3.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "body": { - "$.one": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.one.a.status": { - "combine": "AND", - "matchers": [ - { - "match": "equality" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/form-post-body.xml b/compatibility-suite/pact-compatibility-suite/fixtures/form-post-body.xml deleted file mode 100644 index 6ba2c8ee9..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/form-post-body.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - application/x-www-form-urlencoded - - diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/include-matcher-v3.json b/compatibility-suite/pact-compatibility-suite/fixtures/include-matcher-v3.json deleted file mode 100644 index 4d033cd88..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/include-matcher-v3.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "body": { - "$.one": { - "combine": "AND", - "matchers": [ - { - "match": "include", - "value": "a" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/integer-type-matcher-v3.json b/compatibility-suite/pact-compatibility-suite/fixtures/integer-type-matcher-v3.json deleted file mode 100644 index 0418a6133..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/integer-type-matcher-v3.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "body": { - "$.one": { - "combine": "AND", - "matchers": [ - { - "match": "integer" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/kafka-body.xml b/compatibility-suite/pact-compatibility-suite/fixtures/kafka-body.xml deleted file mode 100644 index c7802fe43..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/kafka-body.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - application/vnd.schemaregistry.v1+json - - - - diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/kafka-expected-body.xml b/compatibility-suite/pact-compatibility-suite/fixtures/kafka-expected-body.xml deleted file mode 100644 index 3c4f5dab3..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/kafka-expected-body.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - application/vnd.schemaregistry.v1+json - - - - diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/minmax-type-matcher-v3.json b/compatibility-suite/pact-compatibility-suite/fixtures/minmax-type-matcher-v3.json deleted file mode 100644 index a10b9ab34..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/minmax-type-matcher-v3.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "body": { - "$.one": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.one.a.ids": { - "combine": "AND", - "matchers": [ - { - "match": "type", - "min": 1, - "max": 4 - } - ] - }, - "$.two.*.ids": { - "combine": "AND", - "matchers": [ - { - "match": "type", - "min": 1, - "max": 4 - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/mockserver-generator.json b/compatibility-suite/pact-compatibility-suite/fixtures/mockserver-generator.json deleted file mode 100644 index bc0700a1f..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/mockserver-generator.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "MockServerURL", "regex": ".*(/a)", "example": "http://1234:8080/a" } - } -} diff --git 
a/compatibility-suite/pact-compatibility-suite/fixtures/multipart-body.xml b/compatibility-suite/pact-compatibility-suite/fixtures/multipart-body.xml deleted file mode 100644 index b687081f2..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/multipart-body.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - multipart/mixed; boundary=gc0p4Jq0M2Yt08jU534c0p - - - - diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/multipart2-body.xml b/compatibility-suite/pact-compatibility-suite/fixtures/multipart2-body.xml deleted file mode 100644 index 87446f104..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/multipart2-body.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - multipart/mixed; boundary=gc0p4Jq0M2Yt08jU534c0p - - - - diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/notempty-matcher-v4.json b/compatibility-suite/pact-compatibility-suite/fixtures/notempty-matcher-v4.json deleted file mode 100644 index 0d9aa6c09..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/notempty-matcher-v4.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "body": { - "$.one": { - "combine": "AND", - "matchers": [ - { - "match": "notEmpty" - } - ] - }, - "$.two": { - "combine": "AND", - "matchers": [ - { - "match": "notEmpty" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/notempty2-matcher-v4.json b/compatibility-suite/pact-compatibility-suite/fixtures/notempty2-matcher-v4.json deleted file mode 100644 index 7d494156f..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/notempty2-matcher-v4.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "body": { - "$": { - "combine": "AND", - "matchers": [ - { - "match": "notEmpty" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/null-matcher-v3.json b/compatibility-suite/pact-compatibility-suite/fixtures/null-matcher-v3.json deleted file mode 100644 index 75d55ef1a..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/null-matcher-v3.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "body": { - "$.one": { - "combine": "AND", - "matchers": [ - { - "match": "null" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/number-type-matcher-v3.json b/compatibility-suite/pact-compatibility-suite/fixtures/number-type-matcher-v3.json deleted file mode 100644 index 3a1ca2d24..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/number-type-matcher-v3.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "body": { - "$.one": { - "combine": "AND", - "matchers": [ - { - "match": "number" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/pact-broker_c1.json b/compatibility-suite/pact-compatibility-suite/fixtures/pact-broker_c1.json deleted file mode 100644 index d11dda1f1..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/pact-broker_c1.json +++ /dev/null @@ -1,279 +0,0 @@ -{ - "consumer": { - "name": "Pact Compatability Suite Broker Client" - }, - "interactions": [ - { - "description": "a request for the provider pacts", - "pending": false, - "request": { - "body": { - "content": { - "consumerVersionSelectors": [], - "includePendingStatus": false - }, - "contentType": "application/json", - "encoded": false - }, - "headers": { - "Content-Type": [ - "application/json" - ] - }, - "method": "POST", - "path": "/pacts/provider/p/for-verification" - }, - "response": { - "body": { - "content": { - "_embedded": { - "pacts": [ - { - "_links": { - "self": { 
- "href": "http://localhost:9876/pacts/provider/p/consumer/c_1", - "name": "Pact between c_1 and p" - } - }, - "shortDescription": "latest" - } - ] - }, - "_links": { - "self": { - "href": "http://localhost:9876/pacts/provider/{provider}/for-verification", - "title": "Pacts to be verified" - } - } - }, - "contentType": "application/json", - "encoded": false - }, - "headers": { - "Content-Type": [ - "application/json" - ] - }, - "matchingRules": { - "body": { - "$._embedded.pacts[*]._links.self.href": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": ".*(\\/pacts\\/provider\\/p\\/consumer\\/c_1)$" - } - ] - } - } - }, - "generators": { - "body": { - "$._embedded.pacts[*]._links.self.href": { - "type": "MockServerURL", - "example": "http://localhost:9876/pacts/provider/p/consumer/c_1", - "regex": ".*(\\/pacts\\/provider\\/p\\/consumer\\/c_1)$" - } - } - }, - "status": 200 - }, - "transport": "https", - "type": "Synchronous/HTTP" - }, - { - "description": "a request for the provider pacts link", - "pending": false, - "request": { - "method": "GET", - "path": "/pacts/provider/p/for-verification" - }, - "response": { - "body": { - "content": { - "_links": { - "self": { - "href": "http://localhost:9876/pacts/provider/p/for-verification", - "title": "Pacts to be verified" - } - } - }, - "contentType": "application/json", - "encoded": false - }, - "headers": { - "Content-Type": [ - "application/json" - ] - }, - "matchingRules": { - "body": { - "$._links.self.href": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": ".(*\\/pacts\\/provider\\/p\\/for-verification)$" - } - ] - } - } - }, - "generators": { - "body": { - "$._links.self.href": { - "type": "MockServerURL", - "example": "http://localhost:9876/pacts/provider/p/for-verification", - "regex": ".*(\\/pacts\\/provider\\/p\\/for-verification)$" - } - } - }, - "status": 200 - }, - "transport": "https", - "type": "Synchronous/HTTP" - }, - { - "description": "a request to the root", - "pending": false, - "request": { - "method": "GET", - "path": "/" - }, - "response": { - "body": { - "content": { - "_links": { - "pb:provider-pacts-for-verification": { - "href": "http://localhost:9876/pacts/provider/{provider}/for-verification", - "templated": true, - "title": "Pact versions to be verified for the specified provider" - } - } - }, - "contentType": "application/json", - "encoded": false - }, - "headers": { - "Content-Type": [ - "application/json" - ] - }, - "matchingRules": { - "body": { - "$._links.pb:provider-pacts-for-verification.href": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": ".*(\\/\\Qpacts\\E\\/\\Qprovider\\E\\/\\Q{provider}\\E\\/\\Qfor-verification\\E)$" - } - ] - } - } - }, - "generators": { - "body": { - "$._links.pb:provider-pacts-for-verification.href": { - "type": "MockServerURL", - "example": "http://localhost:9876/pacts/provider/{provider}/for-verification", - "regex": ".*(\\/\\Qpacts\\E\\/\\Qprovider\\E\\/\\Q{provider}\\E\\/\\Qfor-verification\\E)$" - } - } - }, - "status": 200 - }, - "transport": "https", - "type": "Synchronous/HTTP" - }, - { - "description": "publish verification results for c_1", - "pending": false, - "request": { - "method": "POST", - "path": "/pacts/provider/p/consumer/c_1/verification-results", - "headers": { - "Content-Type": ["application/json"] - }, - "body": { - "content": { - "providerApplicationVersion": "0.0.0", - "success": true, - "testResults":[{"interactionId":"ID1","success":true}], - "verifiedBy":{ - "implementation": 
"Pact-JVM", - "version": "4.5.7" - } - }, - "contentType": "application/json", - "encoded": false - }, - "matchingRules": { - "body": { - "$.providerApplicationVersion": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.success": { - "combine": "AND", - "matchers": [ - { - "match": "boolean" - } - ] - }, - "$.testResults": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.verifiedBy.implementation": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.verifiedBy.version": { - "combine": "AND", - "matchers": [ - { - "match": "semver" - } - ] - } - } - } - }, - "response": { - "status": 201, - "headers": { - "Content-Type": [ "application/json" ] - }, - "body": { - "content": {}, - "contentType": "application/json", - "encoded": false - } - }, - "transport": "https", - "type": "Synchronous/HTTP" - } - ], - "metadata": { - "pactSpecification": { - "version": "4.0" - } - }, - "provider": { - "name": "Pact Broker" - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/pact-broker_c2.json b/compatibility-suite/pact-compatibility-suite/fixtures/pact-broker_c2.json deleted file mode 100644 index e2efca419..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/pact-broker_c2.json +++ /dev/null @@ -1,388 +0,0 @@ -{ - "consumer": { - "name": "Pact Compatability Suite Broker Client" - }, - "interactions": [ - { - "description": "a request for the provider pacts", - "pending": false, - "request": { - "body": { - "content": { - "consumerVersionSelectors": [], - "includePendingStatus": false - }, - "contentType": "application/json", - "encoded": false - }, - "headers": { - "Content-Type": [ - "application/json" - ] - }, - "method": "POST", - "path": "/pacts/provider/p/for-verification" - }, - "response": { - "body": { - "content": { - "_embedded": { - "pacts": [ - { - "_links": { - "self": { - "href": "http://localhost:9876/pacts/provider/p/consumer/c_2", - "name": "Pact between c_2 and p" - } - }, - "shortDescription": "latest" - } - ] - }, - "_links": { - "self": { - "href": "http://localhost:9876/pacts/provider/{provider}/for-verification", - "title": "Pacts to be verified" - } - } - }, - "contentType": "application/json", - "encoded": false - }, - "headers": { - "Content-Type": [ - "application/json" - ] - }, - "matchingRules": { - "body": { - "$._embedded.pacts[*]._links.self.href": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": ".*(\\/pacts\\/provider\\/p\\/consumer\\/c_2)$" - } - ] - } - } - }, - "generators": { - "body": { - "$._embedded.pacts[*]._links.self.href": { - "type": "MockServerURL", - "example": "http://localhost:9876/pacts/provider/p/consumer/c_2", - "regex": ".*(\\/pacts\\/provider\\/p\\/consumer\\/c_2)$" - } - } - }, - "status": 200 - }, - "transport": "https", - "type": "Synchronous/HTTP" - }, - { - "description": "a request for the provider pacts link", - "pending": false, - "request": { - "method": "GET", - "path": "/pacts/provider/p/for-verification" - }, - "response": { - "body": { - "content": { - "_links": { - "self": { - "href": "http://localhost:9876/pacts/provider/p/for-verification", - "title": "Pacts to be verified" - } - } - }, - "contentType": "application/json", - "encoded": false - }, - "headers": { - "Content-Type": [ - "application/json" - ] - }, - "matchingRules": { - "body": { - "$._links.self.href": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": ".*(\\/pacts\\/provider\\/p\\/for-verification)$" - 
} - ] - } - } - }, - "generators": { - "body": { - "$._links.self.href": { - "type": "MockServerURL", - "example": "http://localhost:9876/pacts/provider/p/for-verification", - "regex": ".*(\\/pacts\\/provider\\/p\\/for-verification)$" - } - } - }, - "status": 200 - }, - "transport": "https", - "type": "Synchronous/HTTP" - }, - { - "description": "a request to the root", - "pending": false, - "request": { - "method": "GET", - "path": "/" - }, - "response": { - "body": { - "content": { - "_links": { - "pb:provider-pacts-for-verification": { - "href": "http://localhost:9876/pacts/provider/{provider}/for-verification", - "templated": true, - "title": "Pact versions to be verified for the specified provider" - } - } - }, - "contentType": "application/json", - "encoded": false - }, - "headers": { - "Content-Type": [ - "application/json" - ] - }, - "matchingRules": { - "body": { - "$._links.pb:provider-pacts-for-verification.href": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": ".*\\/(\\Qpacts\\E\\/\\Qprovider\\E\\/\\Q{provider}\\E\\/\\Qfor-verification\\E)$" - } - ] - } - } - }, - "generators": { - "body": { - "$._links.pb:provider-pacts-for-verification.href": { - "type": "MockServerURL", - "example": "http://localhost:9876/pacts/provider/{provider}/for-verification", - "regex": ".*\\/(\\Qpacts\\E\\/\\Qprovider\\E\\/\\Q{provider}\\E\\/\\Qfor-verification\\E)$" - } - } - }, - "status": 200 - }, - "transport": "https", - "type": "Synchronous/HTTP" - }, - { - "description": "publish verification results for c_2", - "pending": false, - "request": { - "method": "POST", - "path": "/pacts/provider/p/consumer/c_2/verification-results", - "headers": { - "Content-Type": [ "application/json" ] - }, - "body": { - "content": { - "providerApplicationVersion": "0.0.0", - "success": true, - "testResults":[{"interactionId":"ID1","success":true}], - "verifiedBy": { - "implementation": "Pact-JVM", - "version": "4.5.7" - } - }, - "contentType": "application/json", - "encoded": false - }, - "matchingRules": { - "body": { - "$.providerApplicationVersion": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.success": { - "combine": "AND", - "matchers": [ - { - "match": "boolean" - } - ] - }, - "$.testResults": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.verifiedBy.implementation": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.verifiedBy.version": { - "combine": "AND", - "matchers": [ - { - "match": "semver" - } - ] - } - } - } - }, - "response": { - "status": 201, - "headers": { - "Content-Type": [ "application/json" ] - }, - "body": { - "content": {}, - "contentType": "application/json", - "encoded": false - } - }, - "transport": "https", - "type": "Synchronous/HTTP" - }, - { - "description": "publish failed verification results for c_2", - "pending": false, - "request": { - "method": "POST", - "path": "/pacts/provider/p/consumer/c_2/verification-results", - "headers": { - "Content-Type": [ "application/json" ] - }, - "body": { - "content": { - "providerApplicationVersion": "0.0.0", - "success": false, - "testResults": [ - { - "interactionId": "ID1", - "interactionDescription":"ID1", - "mismatches": [ - { - "attribute": "status", - "description": "expected status of 200 but was 500" - } - ], - "success": false - } - ], - "verifiedBy": { - "implementation": "Pact-JVM", - "version": "4.5.7" - } - }, - "contentType": "application/json", - "encoded": false - }, - "matchingRules": { - "body": { - 
"$.providerApplicationVersion": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.success": { - "combine": "AND", - "matchers": [ - { - "match": "boolean" - } - ] - }, - "$.verifiedBy.implementation": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.verifiedBy.version": { - "combine": "AND", - "matchers": [ - { - "match": "semver" - } - ] - }, - "$.testResults.*": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.testResults[*].mismatches[*].attribute": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.testResults[*].mismatches[*].description": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - } - } - } - }, - "response": { - "status": 201, - "headers": { - "Content-Type": [ "application/json" ] - }, - "body": { - "content": {}, - "contentType": "application/json", - "encoded": false - } - }, - "transport": "https", - "type": "Synchronous/HTTP" - } - ], - "metadata": { - "pactSpecification": { - "version": "4.0" - } - }, - "provider": { - "name": "Pact Broker" - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/providerstate-generator.json b/compatibility-suite/pact-compatibility-suite/fixtures/providerstate-generator.json deleted file mode 100644 index 03a9770f6..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/providerstate-generator.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "ProviderState", "expression": "${id}" } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/randomdec-generator.json b/compatibility-suite/pact-compatibility-suite/fixtures/randomdec-generator.json deleted file mode 100644 index 631525b14..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/randomdec-generator.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "RandomDecimal", "digits": 6 } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/randomhex-generator.json b/compatibility-suite/pact-compatibility-suite/fixtures/randomhex-generator.json deleted file mode 100644 index d89256991..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/randomhex-generator.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "RandomHexadecimal", "digits": 6 } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/randomint-generator.json b/compatibility-suite/pact-compatibility-suite/fixtures/randomint-generator.json deleted file mode 100644 index ec8958c74..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/randomint-generator.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "RandomInt", "min": 0, "max": 1000 } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/randomregex-generator.json b/compatibility-suite/pact-compatibility-suite/fixtures/randomregex-generator.json deleted file mode 100644 index 27b873899..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/randomregex-generator.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "Regex", "regex": "\\d{1,8}" } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/randomstr-generator.json b/compatibility-suite/pact-compatibility-suite/fixtures/randomstr-generator.json deleted file mode 100644 index 41e8cc359..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/randomstr-generator.json +++ /dev/null @@ -1,5 
+0,0 @@ -{ - "body": { - "$.one": { "type": "RandomString", "size": 6 } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/rat.jpg b/compatibility-suite/pact-compatibility-suite/fixtures/rat.jpg deleted file mode 100644 index 4eb2392321658cff4f77c61d7ab7406791529420..0000000000000000000000000000000000000000 Binary files a/compatibility-suite/pact-compatibility-suite/fixtures/rat.jpg and /dev/null differ
diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-header-v2.json b/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-header-v2.json deleted file mode 100644 index 7d1f98fce..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-header-v2.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "$.header.x-test": { - "match": "regex", - "regex": "\\d{1,4}" - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-metadata.json b/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-metadata.json deleted file mode 100644 index f14bdfd08..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-metadata.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "metadata": { - "Origin": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": "\\w{3}-\\d+" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-path-v2.json b/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-path-v2.json deleted file mode 100644 index 672b3274a..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-path-v2.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "$.path": { - "match": "regex", - "regex": "\\/\\w{3}\\/\\d{3}" - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-query-v2.json b/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-query-v2.json deleted file mode 100644 index 48d8592ec..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-query-v2.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "$.query.a": { - "match": "regex", - "regex": "\\d{1,4}" - } -} diff --git
a/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-v2.json b/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-v2.json deleted file mode 100644 index 7d8681081..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/regex-matcher-v2.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "$.body.one": { - "match": "regex", - "regex": "\\w{3}\\d{3}" - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/sample.pdf b/compatibility-suite/pact-compatibility-suite/fixtures/sample.pdf deleted file mode 100644 index aac7901f4e16ba286a0e60a7d1fbe0297194c41e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7498 zcmai32UL^WvbGBmRHP{ah7v?tdO}A)dhY^K0)`Hugx&z zAOw)!r3nHr@t%9nIrqG`-oMtLz4!bxdr$dhuay~)`aM~Gr~ncGYRqb^ZY*s~1Hga~ zAkNGNASw!0#yVKyt$=V6MH4J*ZHLD?gJtb7c`0 z=v5h_U+VPWr%3lAkygGZbb${lU+ZQjz0k3rjq6@7_MW2aaoU_6%9 z5hvBhXWy6KlZwc^Gw;Y^CFj*Qoy@iBRf%eiUrk0~iOJE$y9lb6efhQ_HIsXbZNqU{ zmLi8Zy@R!*n#oCmoB*8WF)Z;6jZfo@;-~`;@=X2Y+1K_Su-nKzZ9BGo_D366I2L<1 zEsuA<*`BH_J|$x*_vLb=_F!bhnYiZD5$PVvVd6s@9{hKI@{J5%y)D%}-oXCJUv58T zb94NQ-l8e}-Dbt$sO;buT3Aq1;RcPdc!7`T5p)6-u;KS9I}65oI1xJ;el7JZcV~{) z$OF2|owUo05oujbeG}bOH5aBb5}VEMNqc>G8*%q+<9Q7Nn(hYiWI0>n41G@??gBjP zr*A7L^^AxpHROb~(P4bPa^C;M|3;AR3HZ|wM~29HHKO=4nnyrR(?IUiCkI5qfo*!#A9RT_Dbq)v%FYoF z<|#dyWv%bXS6q^7TD_&2ezFO={D^WWY(U+Ix8OZ%hf5tJ(|lo=yA&l=hY5erT-BUE z2ak)w#A}D_TK&Dus4a}g1GMTW0BH627F=D~M!p@$s#DgW3>vLhjOA{vyPY;cNVC`#OI% zH&e5^{ZD>~I{m2j3U$TscMA1)1j5-Oy2MCj-uXE~@WWPiweGkVDug3#2a4d9ri!1q zTW+}l-N8CaoRO~@4VD7F3G9@L9ZJ1XXL&vi&{&vYuR;kw7(Aae74jjVO{zH^Q1u|6 zDx~|TY3Nl~8#V4ICCID?jSs8)eR}o?xZ#vG6{Jf$p!hXwbY7W}Z@RUPUJVD&Vcv8I zy{k=V&a&U&^^pvL!+K&2!Cu!o_I3vr^wdTU!IpP0pv~E5kbl>uQ9Fm9S$|jZ*l9%a zvaX?hA6HTbM8t38R`0jiO+^?`BVAfU_yjq<;Us`4x z?|PC@)*JV3S1lxi;|+W|w6FGNwAAH9HPIN=-~qDPppx zA1;1ttYfpYBea@Bn13&*)Y)2CO`PZv`m;!q(QT(}aImL$P?%#TT?{TR`n3$b9TQZ` z$hvf;hWm97gEe*cxI%T0(luWz$2ax45iM84*g7=9Y_bCD9<3K4%@x9`Q~Mi2=TzYg zRuql%PeeOo+)G}HYz5q9HcFl0m0u3E=;+T8d}ac#SDfSRJ&dR|Gf6VN#td(f2VeU> zwk=kAov3`{We-%^peEVpGQr}hZ)9~7ox=VW4J%VQ(aRLKpnStuSY$ti`D0P|=WLj2 zXHFD-9Ub=ZmL+i?&)I>x$wwIMxv3LZrtxalo-Q<9nLpb6olV|&+X8>Yw;00B>ouXW zbO{`JY8va0d5S298J&CM%;0Fi5w^@ftU?PhE{u@iQwDu03myif#7+2kBD}mie#sg zj1q>Tfh}Iso@v)AeH-1vp(Xp=FV4~${lPgqZ-??74C)%j%9$s{j-PnRxq5qRxfH4% zwI_&|+L~8Bs8rdtAI4wl9#EP5wK8qn^9Xz}_YNEUg4gK1o4x(Jx60cn)YtQk)N>yT zUJMo)xZ6Wk5LLV@7o?q>JaOg2MZvX+PX}r+y|BgRs+#hKaCa}K3Px(` zb?eRZ)%$UbA=ROh;8O1osqHVkHRhZ52Lh+`!E@CMKc>C9xV|C5wX4ng;5Xx+2{qSu zY`C^y@qxoPjeQ2fCpV^{0=thG%RDrA_Jv;#@kF9{f-Mt44dLdDBcb zxPmuNoqKU9tHOKBW>=uZ(y@1H;(EK=Pn~cH zY_o^TVl6av7nzguuBo<3QZK3g^d|l7cO99*pu8`4hZNHMr+1AC%S^-s(HkSQI)462 zK|3uEmWcrYJ%q@P3E!zx-?(JQUk%}$%es!14zzYh4XUmRWBH(qC_C_LU*cHd*bQ#Q zU4vuVbHekJw72L?PG8h+EH7-f$uAgGz+&A}7vjVjwXCkV_HV0Q-31;?Qdx^)<)I$3A;fAQR&;uI{PhLvH`!tkv&QEca=cevA}A zTi)g`mtiiUKIC_DqwVW6u=h3A?8hA$Ykzjk(7L#kVJ-Y>W||nI@GB9!??(4^o~VhT zJW71aHGIBzOL!h>c&{E)>8|;EROKF4I~Q6`f7u#m5k5N3TFQSYYqY;v10gh^|)t2@{nlf04q^gRTU~AbZr-Wqcr==E%1ZvkXv-G7%VK0 zY|sXIh=ZFmZ2IlgMdf=*IO^uQ;};bZC-@_|FKWc~&6<;6*|~nJE@R>S*j?rWvP^^yGIB8qO+ zR$pk{l*dL!j)6g{(Obroq0gs_A1Rdz?4d^0&+xJhz^-WSjgP-_IdR*aF*%${81fI% zXHGhNRZ-aePFUvlPlb}1HxikVFPQ3k8LP|S5TiVWm02GlV98+>1V3&9jh{ z&Q4X#`py*wVc%mydqJJ2nZSY0v45JW|G~{K5<9c6J(qC}NhgJi>!XwVtLR{> zlJ%+q7BCWAk>i?BgiYsKQ0&$$JRml5~*su4RMt8k; zq8^OyV51&A|d*YnmKV^15_#M-OKm{rr$2<9SwN)G4)+rls zYLsNG)z>MkItW#~;JR`vO>0_=DFV27kD9?WNp;Ya2DQM1xGx9GWXccWnXp6*pK@QQ z&1jHuRB<nsY(&yuf*xc2NBZ_>rAuPuFoS;}^Z=+0)?J!Q`9fXZ6oJGrxFt;t!OnVKho zX5B6FW`du?8b2tjvJtffRHMI1L@PYZ48zRv8A4ZWar*PKar5F3NlOHU?xu 
zl@f$VAs{d~N>Wx(R!9bY4~>w8qfpW^FewB~5=DyB|Ib2(?>(Hkv=!Fe*2UEx2!_FB z;YfrK8X<)gg2T`dw3HMYB}GEgJ(Qr7te_Mbgf4iDGyZo-(GV!9bqbPElLh<_lUz`Y diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/semver-matcher-v4.json b/compatibility-suite/pact-compatibility-suite/fixtures/semver-matcher-v4.json deleted file mode 100644 index b44d4fad3..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/semver-matcher-v4.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "body": { - "$.*": { - "combine": "AND", - "matchers": [ - { - "match": "semver" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/siren.json b/compatibility-suite/pact-compatibility-suite/fixtures/siren.json deleted file mode 100644 index d2d5fccdf..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/siren.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "class": [ - "order" - ], - "properties": { - "orderNumber": 42, - "itemCount": 3, - "status": "pending" - }, - "entities": [ - { - "class": [ - "items", - "collection" - ], - "rel": [ "http://x.io/rels/order-items" ], - "href": "http://api.x.io/orders/42/items" - }, - { - "class": [ - "info", - "customer" - ], - "rel": [ "http://x.io/rels/customer" ], - "properties": { - "customerId": "pj123", - "name": "Peter Joseph" - }, - "links": [ - { - "rel": [ "self" ], - "href": "http://api.x.io/customers/pj123" - } - ] - } - ], - "actions": [ - { - "name": "add-item", - "title": "Add Item", - "method": "POST", - "href": "http://api.x.io/orders/42/items" - }, - { - "name": "delete-item", - "title": "Delete Item", - "method": "DELETE", - "href": "http://api.x.io/orders/42/items" - } - ], - "links": [ - { - "rel": [ "self" ], - "href": "http://api.x.io/orders/42" - }, - { - "rel": [ "previous" ], - "href": "http://api.x.io/orders/41" - }, - { - "rel": [ "next" ], - "href": "http://api.x.io/orders/43" - } - ] -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/siren2.json b/compatibility-suite/pact-compatibility-suite/fixtures/siren2.json deleted file mode 100644 index e367e0d10..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/siren2.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "class": [ - "order" - ], - "properties": { - "orderNumber": 42, - "itemCount": 3, - "status": "pending" - }, - "entities": [ - { - "class": [ - "items", - "collection" - ], - "rel": [ "http://x.io/rels/order-items" ], - "href": "http://api.x.io/orders/42/items" - }, - { - "class": [ - "info", - "customer" - ], - "rel": [ "http://x.io/rels/customer" ], - "properties": { - "customerId": "pj123", - "name": "Peter Joseph" - }, - "links": [ - { - "rel": [ "self" ], - "href": "http://api.x.io/customers/pj123" - } - ] - } - ], - "actions": [ - { - "name": "delete-item", - "title": "Delete Item", - "method": "DELETE", - "href": "http://api.x.io/orders/42/items" - }, - { - "name": "add-item", - "title": "Add Item", - "method": "POST", - "href": "http://api.x.io/orders/42/items" - }, - { - "name": "update-item", - "title": "Update Item", - "method": "PUT", - "href": "http://api.x.io/orders/42/items" - } - ], - "links": [ - { - "rel": [ "self" ], - "href": "http://api.x.io/orders/42" - }, - { - "rel": [ "previous" ], - "href": "http://api.x.io/orders/41" - }, - { - "rel": [ "next" ], - "href": "http://api.x.io/orders/43" - } - ] -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/siren3.json b/compatibility-suite/pact-compatibility-suite/fixtures/siren3.json deleted file mode 100644 index 9be0f11ad..000000000 --- 
a/compatibility-suite/pact-compatibility-suite/fixtures/siren3.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "class": [ - "order" - ], - "properties": { - "orderNumber": 42, - "itemCount": 3, - "status": "pending" - }, - "entities": [ - { - "class": [ - "items", - "collection" - ], - "rel": [ "http://x.io/rels/order-items" ], - "href": "http://api.x.io/orders/42/items" - }, - { - "class": [ - "info", - "customer" - ], - "rel": [ "http://x.io/rels/customer" ], - "properties": { - "customerId": "pj123", - "name": "Peter Joseph" - }, - "links": [ - { - "rel": [ "self" ], - "href": "http://api.x.io/customers/pj123" - } - ] - } - ], - "actions": [ - { - "name": "add-item", - "title": "Add Item", - "method": "POST", - "href": "http://api.x.io/orders/42/items" - }, - { - "name": "update-item", - "title": "Update Item", - "method": "PUT", - "href": "http://api.x.io/orders/42/items" - } - ], - "links": [ - { - "rel": [ "self" ], - "href": "http://api.x.io/orders/42" - }, - { - "rel": [ "previous" ], - "href": "http://api.x.io/orders/41" - }, - { - "rel": [ "next" ], - "href": "http://api.x.io/orders/43" - } - ] -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/spider.jpg b/compatibility-suite/pact-compatibility-suite/fixtures/spider.jpg deleted file mode 100644 index c0a1e9350d79db148f5c9f3628df4efa5b399906..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 30922 zcmeFYby!@>(l5ads_w4U-BVSo=YHva13;v%tfmY=K|ukiB0qrpO+dYp zpMxy`pr*zRzy<&SSO5|fGyn<`{XVEsF#cR2NKErLc0^*%ziCjBSP%sjK!Ch@A`c-X zCPiLDkcUGQ`XAnOB>tU{)&05u*HuMLOP5iIPf&nQSOm!_AS5g)ASNjUViXjW6cmya z1|g|YUjDl+zh&HS;!oQn5bl!v{H|VnHZV5`oDb^e!tZC}&M(L(zz>j?^K-X>I>8Z) z5V)O#s|@qkt{!Ga2bc`AiKv!T@MYtE# z)4?6#;O5HsTcQob%^M-ZjI95!hPk+FY5gPkf7F4C%kS3yq4q*3`5>A9ZyNJ5^mm8z z>%+a=ygi|CB_Fseg5_WAFz7$L?%tlxe>wnz^243sE=V#jq{apRsp|LB{D=B?hwU6( z-2ZSOHTw@068}T=KQ{h%)yN)7D!M_ve^;WWD8u|)k|fLx>Hw4c10gmb2oz>302CC4 ziUUQ2pb(&hge@E>0)yL%*+>Wp3PJ?_B}dKG3t{65h5wd=l+5RV6aujk5D>Nz69x(h z*n)r}0^$-th=ec<2o3_mgl(V_|Kiv2bU+Rx8|VLQ&u=*}q#U@oxR`*jmj!sfT;1jQsI1O!Dz#l!_91jImM ze^VI4J-v|Q<+rFmv*}OcCBdF>8-$yup_`kt4D&ye?XNN|)eerFr^vDL7n#HV^yr|%APG2B1Sl>J7XXStg$02S8*y=H+%R(f(Jt4&=zQ(rzg36S0Z>2;Cwq8Ge*WK6@vpA%|37>Ar|dtb?Z3#8Yr${$&(b2x_}}zj4g6OF|JA^MHSk{z z{8t12|I@&~ofEh#($euox*qpis10gh@MB$l9c495C8TEt0HD5fhPrs6KLP+;ToIo7 zDhiCIX6B4o`v6P;9RLG>2cWfqdb!K%>T3RWiT>ww`Tmcu1(*l^mi0f={I9tLFr*WL zbk-P=6px_po(Lp0JO8;MDg1{(~CYmu1O6WKr{UizI6`3oQa#&&=3ZzmBz z>S?GCM)r*qi5czw14I4;L+w3XkTghl5Wos^aYf2UHT?_2e&e9u*u~i!*|tCFw}*-k zcQrCVURjX`EkFgJ2G9cN0vG`hfH%Ma;0!kCTen`e!a{$tQ1BH10F00K_utu&E_50IaF=|4e_M2a ze^(3upsfJ_y{`Y}cY6r{NL(QElmDH^S_l9<2nPUK2mYN0$pQe{q5uHOWp^7-n?LoS zA%=oV`A} zPOL9ZM7)x~WeLO157Th4U52-FlRG2$oe7%p@XKw;w9NB3O!3U9{K4xIUvwwdijJA4 z(q{v@9ToNiTuq*8@f)65y7B28FsSuM;f4+i4$8@};0s6gS36mrG#9VV*wa18&GiWr zTB2B^Qmp<=hCXD9ZfQH%FL|KCf^ASHy(%?vT4XxrWK(Bt>}TtLd{wA=lLW$~*?k$E zU^1<`)3Lz1SZyS<8o9ErsaaPclw}71XQUwo z5be|MjI1^QM8{6a%9%R-?Mf^M9)kwW>xEx`$+XD446(be`TW>qw0?KsFFt%8P-UJeXc zDCNv9Gen71#^pKgioH$|OMI2L+ZvdfJ?iu#JIOz@Fg4TgvGUC`3BT_T%`n7mCo#E1 z=fwFss^(gC=HKJhNs^eMvr&kAu?!iu(r#%V*Of0+*YhX;N46@S_zTd}qkUpJZSDL^e88hC&`x1d%$NF7!Xo@c&D6Z`xnyEK|n2`8cR zTBZJTtVd(4PTW(0PIzOktUTZCIG`ODg`i~P>D1Rv&n$jO=p1lJFAj_Cz$9#nnf#zB z7^?H0^b+rlN3+WS$9nFeMF~uqrD?Xu5j+QP20e1ei#}^;DC=ROjRt+JE(p*ST&sS6 zwveu)c>!8Kkmn>N#Yl)OJBFyH=JLimmvB=E=yiB%Wv_-R3X+vH#3wvg{{AJ!zGOD& zXnE7c-}++e@=UHU_nX?`~NH z=boEA{kyQYk1`4jd#Pfu1F~q9=;D-JCx@C;xVX*vaCh+gUI)+mukffH)--%CppQ%C z=PiyYjvI6*j5NPQsm!r>^ntV?f|xzl=vv`j_HzwO1W{kGEksD4`O=9Ky_4*vJiA%VEo0m`d%tK^-2d^19-p7$L@Y`n}44FwNqXMLvEh 
zU=$)9b2M<<0xnwOWM3-^`?7s5r8W)fZ2m3+p_T;l&JBQ8)UaKQ6&HO<&vyX;*Wxs3 z7jZ;eQnu=^h@jeuk&r4@DdJq7We*Gjx2(BhZSOG$2%*w#z9rH2%8hREO+}Itfbzrz zYsdTCUYfT0E-h>n)$>DGgcb_z{mvP0f-_p%&G!r<3HU(rv_Pub>uT-bue*RZ5Loa# zlW_$uxb{k{eLy&V@%thO=smng6c$=fK^H3fmu{;)!kK+!yl*o&+~VcKxcE~2MX>YU z%U!$iDg&L(SB`m_vBmWl3SV)n-r129_QnCOIjN_MFD_Q94c{`zOus(n9J>vG(K%o= zYnf6xyHAGYqef_pWs266LdLi5{{V3$7*nlxH(tsP8{%H9-2-p=%n7BHnq!)$L5!_8 zcN$O&;23-5=aYzHuB`_dgr>yLeEOC^%Q(Ja!ppnT0Rlx23m?G#36ckTyN0{rXK-`2 zAaSR|8k^_LA}+5n)pgG#*?bQZ8P;@l^E7w~V_o+f-wl%ZGt{MD(o&^8TEjr^%u5@r z&vSroTh+%$XD>5bXkx(Q8AD?ZZ-@aBpQylrV7_>TCO9cL>H>sm%3=QiVBN*8p&-n2 z#HiqC^mhO&QHMJJ0Dj=~LhL^gl2G3jAGCv)g87<3(kyag%t;M&xmn{PNj5dIjuW>@E-V`7O0bz9z?zIQ} zj}Gdgcy%a9X)XOhl}7jjoVdWnT^77D;T_RLt>J2UwQu{3f%k9wh$_&&9%Yy=jU`nZ z#?1Aw(zJ~QL@m_?!Aj5?eZZrUmK8~FUM7OKvVeA+#tUg-g{R+`Sc{QEF|3ZK@<$i23gAI~Ba8cc`Hp9wpFW{$q&b_=UJE{Kcdf zvYM~^8wUV%iYqBU+-I_|{J@|*C?b>=aRAp4h_U&Nptv}BoJ$EuP@odxQId=xpEBo= z`dgL;MO0>U3_^7Q)!*hC!La)O0DfQ?S-aFn(d&+**-l%(65uUy-XH3WB3JA%{^iwm zT)Z{UAMK1|NvU4t5TVLvF=7zYtM9>*{+`q>ovZjOkD9q_k3@rba^ElJFI=kKZVu;x zS7Rp^_Yk4cZR~R0dA_DAm)r%9%%!V=91EAf2N5zq;j>E%;1JAI zDPG}ZR4Xfb9jE}rT2o7gP}y6L>ZG(z;@uvvJm|!Ri&fLh&XEhPMwXg!i|q^?A6>_o z^6{I;d`jqa@?q2q9EL@Xo zj%D$Ur4~YHABw;Ijx+(wd;b6swIvGlmAbdw4G)J>hj|i*7H&sDek!Uki$F9$)msg5 zVE`has&WrMs0d#tz;pD)c|~jSEQ@$Q-w{h`TnqVrCxc1=Z0x_U+(bs!UfTG{O4iba zLBh8HxqlI(V|{JsV?ds%7X3h4vd7Ha#VqIh)URfvKvdJhS3GeqtL`{*C~uB$H{x6o z(eW1=x9gT>#^8uHEV|~texuWYqRSG!;e5Ru%*-YrmC4PvXT0$+duz@T2GXj!m_v8- zGW04iCSKJ{6}+tLV5}-Uypp3%@DkTCaCVj~*krSn16F09+}94cXN07m>lZ8%<4-cM1Z4 zax%2X4;)krPOKVa+=oBWU&OT-T8mvl6pF{J4j{Gc;5ol>(N6hxV5OH>V+cS}?}=r^ z*K0-o*nlw_^LgL7%jheLE|u=B^W&<4p|Cu<0Op#{dgOOB0}FN+!E1>rmZ9LA2gjqq z6+CJ)n9|KWMavEKa@zvCW$TJ+bpwzK2Pv?vf)w@Cvkp_=%vMfUbzc0-6zby8c9yRX z`}&BjH%?np!PW5oCoC6kDaE=8eaeQefT_S(8*w%}b{Y;tn8KR1rwD0Vp-vAl;d2z2 zC-DKxLydlCQLmNb+-eSZvWR!UpDE-t_+Ub>yh2Bk|(QoOb^in!?Rzumxvy{zI?=oWJogNUlP;#VQQWd*TWzODGERH_rfTXs3I5Gv?? zd8vqyZ+(0~C6b1pAsrDG_x5;)$UxoIK0YN4>f$0LLQaBMMzDB-$!EoDb^id_n!CYw z8={&oBGHV65#-`|0mV1?oife};FaVAv)l~;?A^;nm&0n6{j*0EOfEM9fki>t$t`b* zLeM${pe2MgAuRVQ4q+uQUJI2#}1I diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/statuscode-matcher-v4.json b/compatibility-suite/pact-compatibility-suite/fixtures/statuscode-matcher-v4.json deleted file mode 100644 index 8e4fad095..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/statuscode-matcher-v4.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "status": { - "combine": "AND", - "matchers": [ - { - "match": "statusCode", - "status": "success" - } - ] - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/text-body.xml b/compatibility-suite/pact-compatibility-suite/fixtures/text-body.xml deleted file mode 100644 index 09ea3839a..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/text-body.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - text/plain - Hello World! 
- diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/time-generator.json b/compatibility-suite/pact-compatibility-suite/fixtures/time-generator.json deleted file mode 100644 index bb115036f..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/time-generator.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "Time" } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/type-matcher-v2.json b/compatibility-suite/pact-compatibility-suite/fixtures/type-matcher-v2.json deleted file mode 100644 index 91502cb35..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/type-matcher-v2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "$.body.one": { - "match": "type" - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-lower-case-hyphenated.json b/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-lower-case-hyphenated.json deleted file mode 100644 index a340be1f2..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-lower-case-hyphenated.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "Uuid", "format": "lower-case-hyphenated" } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-simple.json b/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-simple.json deleted file mode 100644 index 0b38e158e..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-simple.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "Uuid", "format": "simple" } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-upper-case-hyphenated.json b/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-upper-case-hyphenated.json deleted file mode 100644 index 1239ac116..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-upper-case-hyphenated.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "Uuid", "format": "upper-case-hyphenated" } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-urn.json b/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-urn.json deleted file mode 100644 index 51417f827..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator-urn.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "Uuid", "format": "URN" } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator.json b/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator.json deleted file mode 100644 index e19abf2c2..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/uuid-generator.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "body": { - "$.one": { "type": "Uuid" } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/values-matcher-v3.json b/compatibility-suite/pact-compatibility-suite/fixtures/values-matcher-v3.json deleted file mode 100644 index 39c845f0d..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/values-matcher-v3.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "body": { - "$": { - "combine": "AND", - "matchers": [ - { - "match": "values" - } - ] - }, - "$.*": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - } - } -} diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/xml-body.xml 
b/compatibility-suite/pact-compatibility-suite/fixtures/xml-body.xml deleted file mode 100644 index 7264bd472..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/xml-body.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - application/xml - -AB -]]> - - diff --git a/compatibility-suite/pact-compatibility-suite/fixtures/xml2-body.xml b/compatibility-suite/pact-compatibility-suite/fixtures/xml2-body.xml deleted file mode 100644 index 2ab6248b9..000000000 --- a/compatibility-suite/pact-compatibility-suite/fixtures/xml2-body.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - application/xml - -CD -]]> - - diff --git a/compatibility-suite/tests/shared_steps/consumer.rs b/compatibility-suite/tests/shared_steps/consumer.rs deleted file mode 100644 index 889560252..000000000 --- a/compatibility-suite/tests/shared_steps/consumer.rs +++ /dev/null @@ -1,686 +0,0 @@ -use std::collections::hash_map::Entry; -use std::fs; -use std::fs::File; -use std::io::Read; -use std::path::PathBuf; -use std::sync::{Arc, Mutex}; - -use anyhow::anyhow; -use bytes::Bytes; -use cucumber::{given, then, when, World}; -use cucumber::gherkin::Step; -use itertools::Itertools; -use pact_models::{Consumer, PactSpecification, Provider}; -use pact_models::bodies::OptionalBody; -use pact_models::headers::parse_header; -use pact_models::http_parts::HttpPart; -use pact_models::pact::{Pact, read_pact}; -use pact_models::query_strings::parse_query_string; -use pact_models::sync_interaction::RequestResponseInteraction; -use pact_models::sync_pact::RequestResponsePact; -use pact_models::v4::http_parts::HttpResponse; -use serde_json::Value; -use uuid::Uuid; - -use pact_matching::Mismatch; -use pact_mock_server::matching::MatchResult; -use pact_mock_server::mock_server::{MockServer, MockServerConfig}; -use pact_verifier::{NullRequestFilterExecutor, ProviderInfo, ProviderTransport, VerificationOptions}; -use pact_verifier::provider_client::make_provider_request; - -use crate::shared_steps::{IndexType, setup_body, setup_common_interactions}; - -#[derive(Debug, World)] -pub struct ConsumerWorld { - pub interactions: Vec, - pub mock_server_key: String, - pub mock_server: Arc>, - pub response: HttpResponse, - pub scenario_id: String, - pub pact: Box -} - -impl Default for ConsumerWorld { - fn default() -> Self { - ConsumerWorld { - interactions: vec![], - mock_server_key: "".to_string(), - mock_server: Arc::new(Mutex::new(Default::default())), - response: Default::default(), - scenario_id: "".to_string(), - pact: RequestResponsePact::default().boxed() - } - } -} - -#[given("the following HTTP interactions have been defined:")] -fn the_following_http_interactions_have_been_setup(world: &mut ConsumerWorld, step: &Step) { - if let Some(table) = step.table.as_ref() { - let interactions = setup_common_interactions(table); - world.interactions.extend(interactions); - } -} - -#[when(expr = "the mock server is started with interaction {int}")] -async fn the_mock_server_is_started_with_interaction(world: &mut ConsumerWorld, interaction: usize) -> anyhow::Result<()> { - let pact = RequestResponsePact { - consumer: Consumer { name: "v1-compatibility-suite-c".to_string() }, - provider: Provider { name: "p".to_string() }, - interactions: vec![ world.interactions.get(interaction - 1).unwrap().clone() ], - specification_version: PactSpecification::V1, - .. RequestResponsePact::default() - }; - world.mock_server_key = Uuid::new_v4().to_string(); - let config = MockServerConfig { - pact_specification: PactSpecification::V1, - .. 
MockServerConfig::default() - }; - let (mock_server, future) = MockServer::new( - world.mock_server_key.clone(), pact.boxed(), "[::1]:0".parse()?, config - ).await.map_err(|err| anyhow!(err))?; - tokio::spawn(future); - world.mock_server = mock_server; - Ok(()) -} - -#[when(expr = "the mock server is started with interaction {int} but with the following changes:")] -async fn the_mock_server_is_started_with_interaction_but_with_the_following_changes( - world: &mut ConsumerWorld, - step: &Step, - interaction: usize -) -> anyhow::Result<()> { - let mut request_response_interaction = world.interactions - .get(interaction - 1).unwrap().clone(); - - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - match field.as_str() { - "method" => request_response_interaction.request.method = value.clone(), - "path" => request_response_interaction.request.path = value.clone(), - "query" => request_response_interaction.request.query = parse_query_string(value), - "headers" => { - let headers = value.split(",") - .map(|header| { - let key_value = header.strip_prefix("'").unwrap_or(header) - .strip_suffix("'").unwrap_or(header) - .splitn(2, ":") - .map(|v| v.trim()) - .collect::>(); - (key_value[0].to_string(), parse_header(key_value[0], key_value[1])) - }).collect(); - request_response_interaction.request.headers = Some(headers); - }, - "body" => setup_body(value, &mut request_response_interaction.request, None), - _ => {} - } - } - } - } - - let pact = RequestResponsePact { - consumer: Consumer { name: "v1-compatibility-suite-c".to_string() }, - provider: Provider { name: "p".to_string() }, - interactions: vec![request_response_interaction], - specification_version: PactSpecification::V1, - .. RequestResponsePact::default() - }; - world.mock_server_key = Uuid::new_v4().to_string(); - let config = MockServerConfig { - pact_specification: PactSpecification::V1, - .. MockServerConfig::default() - }; - let (mock_server, future) = MockServer::new( - world.mock_server_key.clone(), pact.boxed(), "[::1]:0".parse()?, config - ).await.map_err(|err| anyhow!(err))?; - tokio::spawn(future); - world.mock_server = mock_server; - Ok(()) -} - -#[when(expr = "the mock server is started with interactions {string}")] -async fn the_mock_server_is_started_with_interactions(world: &mut ConsumerWorld, ids: String) -> anyhow::Result<()> { - let interactions = ids.split(",") - .map(|id| id.trim().parse::().unwrap()) - .map(|index| world.interactions.get(index - 1).unwrap().clone()) - .collect(); - let pact = RequestResponsePact { - consumer: Consumer { name: "v1-compatibility-suite-c".to_string() }, - provider: Provider { name: "p".to_string() }, - interactions, - specification_version: PactSpecification::V1, - .. RequestResponsePact::default() - }; - world.mock_server_key = Uuid::new_v4().to_string(); - let config = MockServerConfig { - pact_specification: PactSpecification::V1, - .. 
MockServerConfig::default() - }; - let (mock_server, future) = MockServer::new( - world.mock_server_key.clone(), pact.boxed(), "[::1]:0".parse()?, config - ).await.map_err(|err| anyhow!(err))?; - tokio::spawn(future); - world.mock_server = mock_server; - Ok(()) -} - -#[when(expr = "request {int} is made to the mock server")] -async fn request_is_made_to_the_mock_server(world: &mut ConsumerWorld, num: usize) -> anyhow::Result<()> { - let request = world.interactions.get(num - 1).unwrap() - .request.as_v4_request(); - let port = { - let guard = world.mock_server.lock().unwrap(); - guard.port - }; - let transport = ProviderTransport { - port, - ..ProviderTransport::default() - }; - let provider_info = ProviderInfo { - host: "[::1]".to_string(), - transports: vec![transport.clone()], - .. ProviderInfo::default() - }; - let verification_options = VerificationOptions { - request_filter: None::>, - .. VerificationOptions::default() - }; - let client = reqwest::Client::builder().build()?; - world.response = make_provider_request( - &provider_info, &request, &verification_options, &client, Some(transport) - ).await?; - Ok(()) -} - -#[when(expr = "request {int} is made to the mock server with the following changes:")] -async fn request_is_made_to_the_mock_server_with_the_following_changes( - world: &mut ConsumerWorld, - step: &Step, - num: usize -) -> anyhow::Result<()> { - let mut request = world.interactions.get(num - 1).unwrap() - .request.as_v4_request(); - - let mut raw_headers = vec![]; - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - match field.as_str() { - "method" => request.method = value.clone(), - "path" => request.path = value.clone(), - "query" => request.query = parse_query_string(value), - "headers" => { - let headers = value.split(",") - .map(|header| { - let key_value = header.strip_prefix("'").unwrap_or(header) - .strip_suffix("'").unwrap_or(header) - .splitn(2, ":") - .map(|v| v.trim()) - .collect::>(); - (key_value[0].to_string(), parse_header(key_value[0], key_value[1])) - }).collect(); - request.headers = Some(headers); - }, - "body" => setup_body(value, &mut request, None), - "raw headers" => { - raw_headers.extend(value.split(',').map(|h| { - h.trim() - .strip_prefix("'").unwrap_or(h) - .strip_suffix("'").unwrap_or(h) - .splitn(2, ":") - .map(|v| v.trim().to_string()) - .collect_tuple::<(String, String)>() - .unwrap() - })); - } - _ => {} - } - } - } - } - - let port = { - let guard = world.mock_server.lock().unwrap(); - guard.port - }; - let transport = ProviderTransport { - port, - ..ProviderTransport::default() - }; - let provider_info = ProviderInfo { - host: "[::1]".to_string(), - transports: vec![transport.clone()], - .. ProviderInfo::default() - }; - let verification_options = VerificationOptions { - request_filter: None::>, - .. 
VerificationOptions::default() - }; - let headers = request.headers_mut(); - for (k, v) in raw_headers { - match headers.entry(k) { - Entry::Occupied(mut entry) => { - entry.get_mut().push(v.clone()); - } - Entry::Vacant(entry) => { - entry.insert(vec![ v.clone() ]); - } - } - } - let client = reqwest::Client::builder() - .build()?; - world.response = make_provider_request( - &provider_info, &request, &verification_options, &client, Some(transport) - ).await?; - - Ok(()) -} - -#[then(expr = "a {int} success response is returned")] -fn a_success_response_is_returned(world: &mut ConsumerWorld, status: u16) -> anyhow::Result<()> { - if world.response.status == status { - Ok(()) - } else { - Err(anyhow!("Expected a success response of {} but got {} ({:?})", status, world.response.status, world.response)) - } -} - -#[then(expr = "a {int} error response is returned")] -fn a_error_response_is_returned(world: &mut ConsumerWorld, status: u16) -> anyhow::Result<()> { - if world.response.status == status { - Ok(()) - } else { - Err(anyhow!("Expected an error response of {} but got {} ({:?})", status, world.response.status, world.response)) - } -} - -#[then(expr = "the payload will contain the {string} JSON document")] -fn the_payload_will_contain_the_json_document(world: &mut ConsumerWorld, name: String) -> anyhow::Result<()> { - let mut fixture = File::open(format!("pact-compatibility-suite/fixtures/{}.json", name))?; - let mut buffer = Vec::new(); - fixture.read_to_end(&mut buffer)?; - let actual_body = world.response.body.value().unwrap_or_default(); - if &actual_body == buffer.as_slice() { - Ok(()) - } else { - let body = OptionalBody::Present(Bytes::from(buffer), None, None); - Err(anyhow!("Expected payload with {} but got {}", world.response.body.display_string(), - body.display_string())) - } -} - -#[then(expr = "the content type will be set as {string}")] -fn the_content_type_will_be_set_as(world: &mut ConsumerWorld, string: String) -> anyhow::Result<()> { - if let Some(header) = world.response.lookup_header_value("content-type") { - if header == string { - Ok(()) - } else { - Err(anyhow!("Expected response content-type of '{}' but was '{}'", string, header)) - } - } else { - Err(anyhow!("Response does not contain a content-type header")) - } -} - -#[when("the pact test is done")] -fn the_pact_test_is_done(world: &mut ConsumerWorld) -> anyhow::Result<()> { - let mut mockserver = world.mock_server.lock().unwrap(); - mockserver.shutdown().map_err(|err| anyhow!(err))?; - - let mismatches = mockserver.mismatches(); - if mismatches.is_empty() { - let dir = PathBuf::from("target/compatibility-suite/v1").join(&world.scenario_id); - fs::create_dir_all(&dir)?; - mockserver.write_pact(&Some(dir.to_string_lossy().to_string()), true)?; - } - - Ok(()) -} - -#[then(expr = "the mock server will write out a Pact file for the interaction(s) when done")] -fn the_mock_server_will_write_out_a_pact_file_for_the_interaction_when_done(world: &mut ConsumerWorld) -> anyhow::Result<()> { - let dir = PathBuf::from("target/compatibility-suite/v1").join(&world.scenario_id); - let pact_file = dir.join("v1-compatibility-suite-c-p.json"); - if pact_file.exists() { - let pact = read_pact(&pact_file)?; - if pact.specification_version() == PactSpecification::V1 { - world.pact = pact; - Ok(()) - } else { - Err(anyhow!("Expected Pact file to be V1 Pact, but was {}", pact.specification_version())) - } - } else { - Err(anyhow!("No pact file found: {}", pact_file.to_string_lossy())) - } -} - -#[then(expr = "the mock server will NOT 
write out a Pact file for the interaction(s) when done")] -fn the_mock_server_will_not_write_out_a_pact_file_for_the_interaction_when_done(world: &mut ConsumerWorld) -> anyhow::Result<()> { - let dir = PathBuf::from("target/compatibility-suite/v1").join(&world.scenario_id); - let pact_file = dir.join("v1-compatibility-suite-c-p.json"); - if pact_file.exists() { - Err(anyhow!("Expected no pact file, but found: {}", pact_file.to_string_lossy())) - } else { - Ok(()) - } -} - -#[then("the mock server status will be OK")] -fn the_mock_server_status_will_be_ok(world: &mut ConsumerWorld) -> anyhow::Result<()> { - let mock_server = world.mock_server.lock().unwrap(); - if mock_server.mismatches().is_empty() { - Ok(()) - } else { - Err(anyhow!("Mock server has {} mismatches", mock_server.mismatches().len())) - } -} - -#[then("the mock server status will NOT be OK")] -fn the_mock_server_status_will_be_error(world: &mut ConsumerWorld) -> anyhow::Result<()> { - let mock_server = world.mock_server.lock().unwrap(); - if mock_server.mismatches().is_empty() { - Err(anyhow!("Mock server has no mismatches")) - } else { - Ok(()) - } -} - -#[then(expr = "the pact file will contain \\{{int}} interaction(s)")] -fn the_pact_file_will_contain_interaction(world: &mut ConsumerWorld, num: usize) -> anyhow::Result<()> { - let i = world.pact.interactions().len(); - if i == num { - Ok(()) - } else { - Err(anyhow!("Expected the pact file to contain {} interaction(s), but had {}", num, i)) - } -} - -#[then(expr = "the \\{{numType}} interaction request will be for a {string}")] -fn the_interaction_request_will_be_for_a(world: &mut ConsumerWorld, num: IndexType, method: String) -> anyhow::Result<()> { - if let Some(interaction) = world.pact.interactions().get(num.val()) { - if let Some(reqres) = interaction.as_request_response() { - if reqres.request.method == method { - Ok(()) - } else { - Err(anyhow!("Expected interaction {} request to be for a {} but was a {}", num.val() + 1, method, reqres.request.method)) - } - } else { - Err(anyhow!("Interaction {} is not a RequestResponseInteraction", num.val() + 1)) - } - } else { - Err(anyhow!("Did not find interaction {} in the Pact", num.val() + 1)) - } -} - -#[then(expr = "the \\{{numType}} interaction response will contain the {string} document")] -fn the_interaction_response_will_contain_the_document(world: &mut ConsumerWorld, num: IndexType, fixture: String) -> anyhow::Result<()> { - if let Some(interaction) = world.pact.interactions().get(num.val()) { - if let Some(reqres) = interaction.as_request_response() { - let mut fixture_file = File::open(format!("pact-compatibility-suite/fixtures/{}", fixture))?; - let mut buffer = Vec::new(); - fixture_file.read_to_end(&mut buffer)?; - - let mut expected = Vec::new(); - if fixture.ends_with(".json") { - let json: Value = serde_json::from_slice(&buffer)?; - let string = json.to_string(); - expected.extend_from_slice(string.as_bytes()); - } else { - expected.extend_from_slice(&buffer); - } - let actual_body = reqres.response.body.value().unwrap_or_default(); - if &actual_body == expected.as_slice() { - Ok(()) - } else { - let body = OptionalBody::Present(Bytes::from(buffer), None, None); - Err(anyhow!("Expected Interaction {} response payload with {} but got {}", num.val() + 1, - reqres.response.body.display_string(), body.display_string())) - } - } else { - Err(anyhow!("Interaction {} is not a RequestResponseInteraction", num.val() + 1)) - } - } else { - Err(anyhow!("Did not find interaction {} in the Pact", num.val() + 1)) - } -} - 
-#[then(expr = "the mock server status will be an expected but not received error for interaction \\{{int}}")]
-fn the_mock_server_status_will_be_an_expected_but_not_received_error_for_interaction(
-  world: &mut ConsumerWorld,
-  num: usize
-) -> anyhow::Result<()> {
-  let mock_server = { world.mock_server.lock().unwrap().clone() };
-  if let Some(interaction) = world.interactions.get(num - 1) {
-    if let Some(_) = mock_server.mismatches().iter().find(|mismatch| {
-      match mismatch {
-        MatchResult::MissingRequest(request) => request == &interaction.request.as_v4_request(),
-        _ => false
-      }
-    }) {
-      Ok(())
-    } else {
-      Err(anyhow!("Did not find a MissingRequest mismatch for Interaction {}", num))
-    }
-  } else {
-    Err(anyhow!("Did not find interaction {} in the Pact", num))
-  }
-}
-
-#[then(expr = "the \\{{numType}} interaction request query parameters will be {string}")]
-fn the_interaction_request_query_parameters_will_be(
-  world: &mut ConsumerWorld,
-  num: IndexType,
-  query_str: String
-) -> anyhow::Result<()> {
-  if let Some(interaction) = world.pact.interactions().get(num.val()) {
-    if let Some(reqres) = interaction.as_request_response() {
-      if reqres.request.query == parse_query_string(query_str.as_str()) {
-        Ok(())
-      } else {
-        Err(anyhow!("Expected interaction {} request to have query {} but was {:?}", num.val() + 1, query_str, reqres.request.query))
-      }
-    } else {
-      Err(anyhow!("Interaction {} is not a RequestResponseInteraction", num.val() + 1))
-    }
-  } else {
-    Err(anyhow!("Did not find interaction {} in the Pact", num.val() + 1))
-  }
-}
-
-#[then("the mock server status will be mismatches")]
-fn the_mock_server_status_will_be_mismatches(world: &mut ConsumerWorld) -> anyhow::Result<()> {
-  let mock_server = world.mock_server.lock().unwrap();
-  if mock_server.mismatches().is_empty() {
-    Err(anyhow!("Mock server has no mismatches"))
-  } else {
-    Ok(())
-  }
-}
-
-#[then(expr = "the mismatches will contain a {string} mismatch with error {string}")]
-fn the_mismatches_will_contain_a_mismatch_with_error(
-  world: &mut ConsumerWorld,
-  mismatch_type: String,
-  error: String
-) -> anyhow::Result<()> {
-  let mock_server = world.mock_server.lock().unwrap();
-  let mismatches: Vec<_> = mock_server.mismatches().iter()
-    .flat_map(|m| match m {
-      MatchResult::RequestMismatch(_, _, mismatches) => mismatches.clone(),
-      _ => vec![]
-    })
-    .collect();
-  if mismatches.iter().find(|ms| {
-    let correct_type = match ms {
-      Mismatch::BodyTypeMismatch { ..
} => mismatch_type == "body-content-type", - _ => ms.mismatch_type().to_lowercase().starts_with(mismatch_type.as_str()) - }; - correct_type && ms.description().contains(error.as_str()) - }).is_some() { - Ok(()) - } else { - Err(anyhow!("Did not find a {} mismatch with error {}", mismatch_type, error)) - } -} - -#[then(expr = "the mock server status will be an unexpected {string} request received error for interaction \\{{int}}")] -fn the_mock_server_status_will_be_an_unexpected_request_received_error_for_interaction( - world: &mut ConsumerWorld, - method: String, - num: usize -) -> anyhow::Result<()> { - let mock_server = { world.mock_server.lock().unwrap().clone() }; - if let Some(interaction) = world.interactions.get(num - 1) { - if let Some(_) = mock_server.mismatches().iter().find(|mismatch| { - match mismatch { - MatchResult::RequestNotFound(request) => request.method == method && - request.path == interaction.request.path && request.query == interaction.request.query, - _ => false - } - }) { - Ok(()) - } else { - Err(anyhow!("Did not find a RequestNotFound mismatch for Interaction {}", num)) - } - } else { - Err(anyhow!("Did not find interaction {} in the Pact", num)) - } -} - -#[then(expr = "the mock server status will be an unexpected {string} request received error for path {string}")] -fn the_mock_server_status_will_be_an_unexpected_request_received_error( - world: &mut ConsumerWorld, - method: String, - path: String -) -> anyhow::Result<()> { - let mock_server = { world.mock_server.lock().unwrap().clone() }; - if let Some(_) = mock_server.mismatches().iter().find(|mismatch| { - match mismatch { - MatchResult::RequestNotFound(request) => request.method == method && - request.path == path, - _ => false - } - }) { - Ok(()) - } else { - Err(anyhow!("Did not find a RequestNotFound mismatch for path {}", path)) - } -} - -#[then(expr = "the \\{{numType}} interaction request will contain the header {string} with value {string}")] -fn the_interaction_request_will_contain_the_header_with_value( - world: &mut ConsumerWorld, - num: IndexType, - key: String, - value: String -) -> anyhow::Result<()> { - if let Some(interaction) = world.pact.interactions().get(num.val()) { - if let Some(reqres) = interaction.as_request_response() { - if let Some(header_value) = reqres.request.lookup_header_value(&key) { - if header_value == value { - Ok(()) - } else { - Err(anyhow!("Expected interaction {} request to have a header {} with value {} but got {}", num.val() + 1, key, value, header_value)) - } - } else { - Err(anyhow!("Expected interaction {} request to have a header {} with value {}", num.val() + 1, key, value)) - } - } else { - Err(anyhow!("Interaction {} is not a RequestResponseInteraction", num.val() + 1)) - } - } else { - Err(anyhow!("Did not find interaction {} in the Pact", num.val() + 1)) - } -} - -#[then(expr = "the \\{{numType}} interaction request content type will be {string}")] -fn the_interaction_request_content_type_will_be( - world: &mut ConsumerWorld, - num: IndexType, - content_type: String -) -> anyhow::Result<()> { - if let Some(interaction) = world.pact.interactions().get(num.val()) { - if let Some(reqres) = interaction.as_request_response() { - if let Some(ct) = reqres.request.content_type() { - if ct.to_string() == content_type { - Ok(()) - } else { - Err(anyhow!("Expected interaction {} request to have a content type of {} but got {}", num.val() + 1, content_type, ct)) - } - } else { - Err(anyhow!("Interaction {} request does not have a content type set", num.val() + 1)) - } 
-    } else {
-      Err(anyhow!("Interaction {} is not a RequestResponseInteraction", num.val() + 1))
-    }
-  } else {
-    Err(anyhow!("Did not find interaction {} in the Pact", num.val() + 1))
-  }
-}
-
-#[then(expr = "the \\{{numType}} interaction request will contain the {string} document")]
-fn the_interaction_request_will_contain_the_document(
-  world: &mut ConsumerWorld,
-  num: IndexType,
-  fixture: String,
-) -> anyhow::Result<()> {
-  if let Some(interaction) = world.pact.interactions().get(num.val()) {
-    if let Some(reqres) = interaction.as_request_response() {
-      let mut fixture_file = File::open(format!("pact-compatibility-suite/fixtures/{}", fixture))?;
-      let mut buffer = Vec::new();
-      fixture_file.read_to_end(&mut buffer)?;
-
-      let mut expected = Vec::new();
-      if fixture.ends_with(".json") {
-        let json: Value = serde_json::from_slice(&buffer)?;
-        let string = json.to_string();
-        expected.extend_from_slice(string.as_bytes());
-      } else {
-        expected.extend_from_slice(&buffer);
-      }
-      let actual_body = reqres.request.body.value().unwrap_or_default();
-      if &actual_body == expected.as_slice() {
-        Ok(())
-      } else {
-        let body = OptionalBody::Present(Bytes::from(buffer), None, None);
-        Err(anyhow!("Expected Interaction {} request with body {} but got {}", num.val() + 1,
-          reqres.request.body.display_string(), body.display_string()))
-      }
-    } else {
-      Err(anyhow!("Interaction {} is not a RequestResponseInteraction", num.val() + 1))
-    }
-  } else {
-    Err(anyhow!("Did not find interaction {} in the Pact", num.val() + 1))
-  }
-}
-
-#[then(expr = "the mismatches will contain a {string} mismatch with path {string} with error {string}")]
-fn the_mismatches_will_contain_a_mismatch_with_path_with_error(
-  world: &mut ConsumerWorld,
-  mismatch_type: String,
-  error_path: String,
-  error: String
-) -> anyhow::Result<()> {
-  let mock_server = world.mock_server.lock().unwrap();
-  let mismatches: Vec<_> = mock_server.mismatches().iter()
-    .flat_map(|m| match m {
-      MatchResult::RequestMismatch(_, _, mismatches) => mismatches.clone(),
-      _ => vec![]
-    })
-    .collect();
-  if mismatches.iter().find(|ms| {
-    let correct_type = match ms {
-      Mismatch::QueryMismatch { parameter, .. } => mismatch_type == "query" && parameter == &error_path,
-      Mismatch::HeaderMismatch { key, .. } => mismatch_type == "header" && key == &error_path,
-      Mismatch::BodyMismatch { path, ..
} => mismatch_type == "body" && path == &error_path, - _ => false - }; - correct_type && ms.description().contains(&error) - }).is_some() { - Ok(()) - } else { - Err(anyhow!("Did not find a {} mismatch for path {} with error {}", mismatch_type, error_path, error)) - } -} diff --git a/compatibility-suite/tests/shared_steps/mod.rs b/compatibility-suite/tests/shared_steps/mod.rs deleted file mode 100644 index 7f6261225..000000000 --- a/compatibility-suite/tests/shared_steps/mod.rs +++ /dev/null @@ -1,344 +0,0 @@ -use std::collections::HashMap; -use std::fs::File; -use std::io::Read; -use std::str::FromStr; - -use anyhow::{anyhow, Error}; -use bytes::Bytes; -use cucumber::gherkin::Table; -use cucumber::Parameter; -use lazy_static::lazy_static; -use pact_models::bodies::OptionalBody; -use pact_models::content_types::{ContentType, JSON, TEXT, XML}; -use pact_models::headers::parse_header; -use pact_models::http_parts::HttpPart; -use pact_models::json_utils::json_to_string; -use pact_models::matchingrules::matchers_from_json; -use pact_models::query_strings::parse_query_string; -use pact_models::sync_interaction::RequestResponseInteraction; -use pact_models::xml_utils::parse_bytes; -use regex::Regex; -use serde_json::{json, Value}; -use sxd_document::dom::Element; -use uuid::Uuid; - -pub mod consumer; -pub mod provider; - -lazy_static! { - static ref INT_REGEX: Regex = Regex::new(r"\d+").unwrap(); - static ref DEC_REGEX: Regex = Regex::new(r"\d+\.\d+").unwrap(); - static ref HEX_REGEX: Regex = Regex::new(r"[a-fA-F0-9]+").unwrap(); - static ref STR_REGEX: Regex = Regex::new(r"\d{1,8}").unwrap(); - static ref DATE_REGEX: Regex = Regex::new(r"\d{4}-\d{2}-\d{2}").unwrap(); - static ref TIME_REGEX: Regex = Regex::new(r"\d{2}:\d{2}:\d{2}").unwrap(); - static ref DATETIME_REGEX: Regex = Regex::new(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{1,9}").unwrap(); -} - -#[derive(Debug, Default, Parameter)] -#[param(name = "numType", regex = "first|second|third")] -pub struct IndexType(usize); - -impl IndexType { - pub fn val(&self) -> usize { - self.0 - } -} - -impl FromStr for IndexType { - type Err = anyhow::Error; - - fn from_str(s: &str) -> Result { - match s { - "first" => Ok(IndexType(0)), - "second" => Ok(IndexType(1)), - "third" => Ok(IndexType(2)), - _ => Err(anyhow!("{} is not a valid index type", s)) - } - } -} - -pub fn setup_common_interactions(table: &Table) -> Vec { - let headers = table.rows.first().unwrap().iter() - .enumerate() - .map(|(index, h)| (h.clone(), index)) - .collect::>(); - let mut interactions = vec![]; - for (row, values) in table.rows.iter().skip(1).enumerate() { - let mut interaction = RequestResponseInteraction { - description: format!("Interaction {}", row), - id: Some(format!("ID{}", row + 1)), - ..RequestResponseInteraction::default() - }; - - if let Some(index) = headers.get("method") { - if let Some(method) = values.get(*index) { - interaction.request.method = method.clone(); - } - } - - if let Some(index) = headers.get("path") { - if let Some(path) = values.get(*index) { - interaction.request.path = path.clone(); - } - } - - if let Some(index) = headers.get("query") { - if let Some(query) = values.get(*index) { - interaction.request.query = parse_query_string(query); - } - } - - if let Some(index) = headers.get("headers") { - if let Some(headers) = values.get(*index) { - if !headers.is_empty() { - let headers = headers.split(",") - .map(|header| { - let key_value = header.strip_prefix("'").unwrap_or(header) - .strip_suffix("'").unwrap_or(header) - .splitn(2, ":") - 
.map(|v| v.trim()) - .collect::>(); - (key_value[0].to_string(), parse_header(key_value[0], key_value[1])) - }).collect(); - interaction.request.headers = Some(headers); - } - } - } - - if let Some(index) = headers.get("body") { - if let Some(body) = values.get(*index) { - setup_body(body, &mut interaction.request, None); - } - } - - if let Some(index) = headers.get("matching rules") { - if let Some(rules) = values.get(*index) { - let json: Value = if rules.starts_with("JSON:") { - serde_json::from_str(rules.strip_prefix("JSON:").unwrap_or(rules)).unwrap() - } else { - let file = File::open(format!("pact-compatibility-suite/fixtures/{}", rules)).unwrap(); - serde_json::from_reader(file).unwrap() - }; - interaction.request.matching_rules = matchers_from_json(&json!({"matchingRules": json}), &None).unwrap(); - } - } - - if let Some(index) = headers.get("response") { - if let Some(response) = values.get(*index) { - interaction.response.status = response.parse().unwrap(); - } - } - - if let Some(index) = headers.get("response headers") { - if let Some(headers) = values.get(*index) { - if !headers.is_empty() { - let headers = headers.split(",") - .map(|header| { - let key_value = header.strip_prefix("'").unwrap_or(header) - .strip_suffix("'").unwrap_or(header) - .splitn(2, ":") - .map(|v| v.trim()) - .collect::>(); - (key_value[0].to_string(), parse_header(key_value[0], key_value[1])) - }).collect(); - interaction.response.headers = Some(headers); - } - } - } - - if let Some(index) = headers.get("response body") { - if let Some(body) = values.get(*index) { - if !body.is_empty() { - setup_body(body, &mut interaction.response, None); - } - } - } - - if let Some(index) = headers.get("response matching rules") { - if let Some(rules) = values.get(*index) { - let json: Value = if rules.starts_with("JSON:") { - serde_json::from_str(rules.strip_prefix("JSON:").unwrap_or(rules)).unwrap() - } else { - let file = File::open(format!("pact-compatibility-suite/fixtures/{}", rules)).unwrap(); - serde_json::from_reader(file).unwrap() - }; - interaction.response.matching_rules = matchers_from_json(&json!({"matchingRules": json}), &None).unwrap(); - } - } - - interactions.push(interaction); - } - interactions -} - -pub fn setup_body(body: &String, httppart: &mut dyn HttpPart, content_type: Option<&str>) { - if !body.is_empty() { - if body.starts_with("JSON:") { - httppart.add_header("content-type", vec!["application/json"]); - *httppart.body_mut() = OptionalBody::Present(Bytes::from(body.strip_prefix("JSON:").unwrap_or(body).trim().to_string()), - Some(JSON.clone()), None); - } else if body.starts_with("XML:") { - httppart.add_header("content-type", vec!["application/xml"]); - *httppart.body_mut() = OptionalBody::Present(Bytes::from(body.strip_prefix("XML:").unwrap_or(body).trim().to_string()), - Some(XML.clone()), None); - } else if body.starts_with("file:") { - if body.ends_with("-body.xml") { - let file_name = body.strip_prefix("file:").unwrap_or(body).trim(); - let mut f = File::open(format!("pact-compatibility-suite/fixtures/{}", file_name)) - .expect(format!("could not load fixture '{}'", body).as_str()); - let mut buffer = Vec::new(); - f.read_to_end(&mut buffer) - .expect(format!("could not read fixture '{}'", body).as_str()); - let fixture = parse_bytes(buffer.as_slice()) - .expect(format!("could not parse fixture as XML: '{}'", body).as_str()); - let root = fixture.as_document().root(); - let body_node = root.children().iter().find_map(|n| n.element()).unwrap(); - let content_type = 
element_text(body_node, "contentType").unwrap_or("text/plain".to_string()); - httppart.add_header("content-type", vec![content_type.as_str()]); - *httppart.body_mut() = OptionalBody::Present(Bytes::from(element_text(body_node, "contents").unwrap_or_default()), - ContentType::parse(content_type.as_str()).ok(), None); - } else { - let content_type = content_type.map(|ct| ContentType::from(ct)) - .unwrap_or_else(|| determine_content_type(body, httppart)); - httppart.add_header("content-type", vec![content_type.to_string().as_str()]); - - let file_name = body.strip_prefix("file:").unwrap_or(body).trim(); - let mut f = File::open(format!("pact-compatibility-suite/fixtures/{}", file_name)) - .expect(format!("could not load fixture '{}'", body).as_str()); - let mut buffer = Vec::new(); - f.read_to_end(&mut buffer) - .expect(format!("could not read fixture '{}'", body).as_str()); - *httppart.body_mut() = OptionalBody::Present(Bytes::from(buffer), - Some(content_type), None); - } - } else { - let body = if body == "EMPTY" { - "".to_string() - } else { - body.clone() - }; - let content_type = content_type.map(|ct| ContentType::from(ct)) - .unwrap_or_else(|| determine_content_type(&body, httppart)); - httppart.add_header("content-type", vec![content_type.to_string().as_str()]); - let body = Bytes::from(body); - *httppart.body_mut() = OptionalBody::Present(body, Some(content_type), None); - } - } -} - -pub fn element_text(root: Element, name: &str) -> Option { - root.children().iter() - .filter_map(|n| n.element()) - .find_map(|n| { - if n.name().local_part().to_string() == name { - let string = n.children().iter() - .filter_map(|child| child.text().map(|t| t.text().trim())) - .collect::(); - if let Some(line_endings) = n.attribute_value("eol") { - if line_endings == "CRLF" && !cfg!(windows) { - Some(string.replace('\n', "\r\n")) - } else { - Some(string) - } - } else { - Some(string) - } - } else { - None - } - }) -} - -pub fn determine_content_type(body: &String, httppart: &mut dyn HttpPart) -> ContentType { - if body.ends_with(".json") { - JSON.clone() - } else if body.ends_with(".xml") { - XML.clone() - } else if body.ends_with(".jpg") { - ContentType::from("image/jpeg") - } else if body.ends_with(".pdf") { - ContentType::from("application/pdf") - } else { - httppart.content_type().unwrap_or(TEXT.clone()) - } -} - -pub fn assert_value_type(value_type: String, element: &Value) -> Result<(), Error> { - match value_type.as_str() { - "integer" => { - if !INT_REGEX.is_match(json_to_string(element).as_str()) { - Err(anyhow!("Was expecting an integer, but got {}", element)) - } else { - Ok(()) - } - } - "decimal number" => { - if !DEC_REGEX.is_match(json_to_string(element).as_str()) { - Err(anyhow!("Was expecting a decimal number, but got {}", element)) - } else { - Ok(()) - } - } - "hexadecimal number" => { - if !HEX_REGEX.is_match(json_to_string(element).as_str()) { - Err(anyhow!("Was expecting a hexadecimal number, but got {}", element)) - } else { - Ok(()) - } - } - "random string" => { - if !element.is_string() { - Err(anyhow!("Was expecting a string, but got {}", element)) - } else { - Ok(()) - } - } - "string from the regex" => { - if !element.is_string() { - Err(anyhow!("Was expecting a string, but got {}", element)) - } else if !STR_REGEX.is_match(json_to_string(element).as_str()) { - Err(anyhow!("Was expecting {} to match \\d{{1,8}}", element)) - } else { - Ok(()) - } - } - "date" => { - if !DATE_REGEX.is_match(json_to_string(element).as_str()) { - Err(anyhow!("Was expecting a date, but got 
{}", element)) - } else { - Ok(()) - } - } - "time" => { - if !TIME_REGEX.is_match(json_to_string(element).as_str()) { - Err(anyhow!("Was expecting a time, but got {}", element)) - } else { - Ok(()) - } - } - "date-time" => { - if !DATETIME_REGEX.is_match(json_to_string(element).as_str()) { - Err(anyhow!("Was expecting a date-time, but got {}", element)) - } else { - Ok(()) - } - } - "UUID" | "simple UUID" | "lower-case-hyphenated UUID" | "upper-case-hyphenated UUID" | "URN UUID" => { - if Uuid::parse_str(json_to_string(element).as_str()).is_err() { - Err(anyhow!("Was expecting an UUID, but got {}", element)) - } else { - Ok(()) - } - } - "boolean" => { - let string = json_to_string(element); - if string == "true" || string == "false" { - Ok(()) - } else { - Err(anyhow!("Was expecting a boolean, but got {}", element)) - } - } - _ => Err(anyhow!("Invalid type: {}", value_type)) - } -} diff --git a/compatibility-suite/tests/shared_steps/provider.rs b/compatibility-suite/tests/shared_steps/provider.rs deleted file mode 100644 index 9ffac4210..000000000 --- a/compatibility-suite/tests/shared_steps/provider.rs +++ /dev/null @@ -1,933 +0,0 @@ -use std::collections::hash_map::Entry; -use std::collections::HashMap; -use std::fs::File; -use std::io::Read; -use std::path::PathBuf; -use std::sync::{Arc, Mutex}; -use std::sync::atomic::{AtomicBool, Ordering}; - -use anyhow::anyhow; -use async_trait::async_trait; -use bytes::Bytes; -use cucumber::{given, then, when, World}; -use cucumber::gherkin::Step; -use itertools::{Either, Itertools}; -use maplit::hashmap; -use pact_models::{Consumer, generators, matchingrules, PactSpecification, Provider}; -use pact_models::bodies::OptionalBody; -use pact_models::content_types::{ContentType, JSON, XML}; -use pact_models::generators::Generator; -use pact_models::headers::parse_header; -use pact_models::http_parts::HttpPart; -use pact_models::matchingrules::MatchingRule; -use pact_models::pact::{Pact, read_pact}; -use pact_models::provider_states::ProviderState; -use pact_models::query_strings::parse_query_string; -use pact_models::request::Request; -use pact_models::response::Response; -use pact_models::sync_interaction::RequestResponseInteraction; -use pact_models::sync_pact::RequestResponsePact; -use pact_models::v4::http_parts::HttpRequest; -use reqwest::Client; -use serde_json::{json, Value}; -use uuid::Uuid; - -use pact_matching::Mismatch; -use pact_mock_server::matching::MatchResult; -use pact_mock_server::mock_server::{MockServer, MockServerConfig}; -use pact_verifier::{ - FilterInfo, - PactSource, - ProviderInfo, - ProviderTransport, - PublishOptions, - VerificationOptions, - verify_provider_async -}; -use pact_verifier::callback_executors::{ProviderStateExecutor, RequestFilterExecutor}; -use pact_verifier::verification_result::{VerificationExecutionResult, VerificationMismatchResult}; - -use crate::shared_steps::{setup_body, setup_common_interactions}; - -#[derive(Debug, World)] -pub struct ProviderWorld { - pub spec_version: PactSpecification, - pub interactions: Vec, - pub provider_key: String, - pub provider_server: Arc>, - pub provider_info: ProviderInfo, - pub sources: Vec, - pub publish_options: Option, - pub verification_results: VerificationExecutionResult, - pub mock_brokers: Vec>>, - pub provider_state_executor: Arc, - pub request_filter_data: HashMap -} - -impl ProviderWorld { - pub(crate) fn verification_options(&self) -> VerificationOptions { - VerificationOptions { - request_filter: if self.request_filter_data.is_empty() { - None - } else { 
- Some(Arc::new(ProviderWorldRequestFilter { - request_filter_data: self.request_filter_data.clone() - })) - }, - .. VerificationOptions::default() - } - } -} - -impl Default for ProviderWorld { - fn default() -> Self { - ProviderWorld { - spec_version: PactSpecification::V1, - interactions: vec![], - provider_key: "".to_string(), - provider_server: Default::default(), - provider_info: ProviderInfo::default(), - sources: vec![], - publish_options: None, - verification_results: VerificationExecutionResult { - result: false, - .. VerificationExecutionResult::new() - }, - mock_brokers: vec![], - provider_state_executor: Default::default(), - request_filter_data: Default::default() - } - } -} - -#[derive(Debug, Default)] -pub struct MockProviderStateExecutor { - pub params: Arc>>, - pub fail_mode: AtomicBool -} - -impl MockProviderStateExecutor { - pub fn set_fail_mode(&self, mode: bool) { - self.fail_mode.store(mode, Ordering::Relaxed); - } - - pub fn was_called(&self, is_setup: bool) -> bool { - let params = self.params.lock().unwrap(); - params.iter().find(|(_, setup)| *setup == is_setup).is_some() - } - - pub fn was_called_for_state(&self, state_name: &str, is_setup: bool) -> bool { - let params = self.params.lock().unwrap(); - params.iter().find(|(state, setup)| { - state.name == state_name && *setup == is_setup - }).is_some() - } - - pub fn was_called_for_state_with_params( - &self, - state_name: &str, - state_params: &HashMap, - is_setup: bool - ) -> bool { - let params = self.params.lock().unwrap(); - params.iter().find(|(state, setup)| { - state.name == state_name && - state.params == *state_params && - *setup == is_setup - }).is_some() - } -} - -#[derive(Debug, Default, Clone)] -pub struct ProviderWorldRequestFilter { - pub request_filter_data: HashMap -} - -impl RequestFilterExecutor for ProviderWorldRequestFilter { - fn call(self: Arc, request: &HttpRequest) -> HttpRequest { - let mut request = request.clone(); - - if let Some(path) = self.request_filter_data.get("path") { - request.path = path.clone(); - } - - if let Some(query) = self.request_filter_data.get("query") { - request.query = parse_query_string(query); - } - - if let Some(headers) = self.request_filter_data.get("headers") { - if !headers.is_empty() { - let headers = headers.split(",") - .map(|header| { - let key_value = header.strip_prefix("'").unwrap_or(header) - .strip_suffix("'").unwrap_or(header) - .splitn(2, ":") - .map(|v| v.trim()) - .collect::>(); - (key_value[0].to_string(), parse_header(key_value[0], key_value[1])) - }).collect(); - request.headers = Some(headers); - } - } - - if let Some(body) = self.request_filter_data.get("body") { - if !body.is_empty() { - if body.starts_with("JSON:") { - request.add_header("content-type", vec!["application/json"]); - request.body = OptionalBody::Present(Bytes::from(body.strip_prefix("JSON:").unwrap_or(body).to_string()), - Some(JSON.clone()), None); - } else if body.starts_with("XML:") { - request.add_header("content-type", vec!["application/xml"]); - request.body = OptionalBody::Present(Bytes::from(body.strip_prefix("XML:").unwrap_or(body).to_string()), - Some(XML.clone()), None); - } else { - let ct = if body.ends_with(".json") { - "application/json" - } else if body.ends_with(".xml") { - "application/xml" - } else { - "text/plain" - }; - request.headers_mut().insert("content-type".to_string(), vec![ct.to_string()]); - - let mut f = File::open(format!("pact-compatibility-suite/fixtures/{}", body)) - .expect(format!("could not load fixture '{}'", body).as_str()); - let 
mut buffer = Vec::new(); - f.read_to_end(&mut buffer) - .expect(format!("could not read fixture '{}'", body).as_str()); - request.body = OptionalBody::Present(Bytes::from(buffer), - ContentType::parse(ct).ok(), None); - } - } - } - - request - } - - fn call_non_http( - &self, - _request_body: &OptionalBody, - _metadata: &HashMap> - ) -> (OptionalBody, HashMap>) { - unimplemented!() - } -} - -#[async_trait] -impl ProviderStateExecutor for MockProviderStateExecutor { - async fn call( - self: Arc, - _interaction_id: Option, - provider_state: &ProviderState, - setup: bool, - _client: Option<&Client> - ) -> anyhow::Result> { - let mut lock = self.params.try_lock(); - if let Ok(ref mut params) = lock { - params.push((provider_state.clone(), setup)); - } - - if self.fail_mode.load(Ordering::Relaxed) { - Err(anyhow!("ProviderStateExecutor is in fail mode")) - } else { - Ok(hashmap! {}) - } - } - - fn teardown(self: &Self) -> bool { - return true - } -} - -#[given("the following HTTP interactions have been defined:")] -fn the_following_http_interactions_have_been_setup(world: &mut ProviderWorld, step: &Step) { - if let Some(table) = step.table.as_ref() { - let interactions = setup_common_interactions(table); - world.interactions.extend(interactions); - } -} - -#[given(expr = "a provider is started that returns the response from interaction {int}")] -#[allow(deprecated)] -async fn a_provider_is_started_that_returns_the_response_from_interaction(world: &mut ProviderWorld, num: usize) -> anyhow::Result<()> { - let pact = RequestResponsePact { - consumer: Consumer { name: "v1-compatibility-suite-c".to_string() }, - provider: Provider { name: "p".to_string() }, - interactions: vec![ world.interactions.get(num - 1).unwrap().clone() ], - specification_version: world.spec_version, - .. RequestResponsePact::default() - }; - world.provider_key = Uuid::new_v4().to_string(); - let config = MockServerConfig { - pact_specification: world.spec_version, - .. MockServerConfig::default() - }; - let (mock_server, future) = MockServer::new( - world.provider_key.clone(), pact.boxed(), "[::1]:0".parse()?, config - ).await.map_err(|err| anyhow!(err))?; - tokio::spawn(future); - world.provider_server = mock_server; - - let ms = world.provider_server.lock().unwrap(); - world.provider_info = ProviderInfo { - name: "p".to_string(), - host: "[::1]".to_string(), - port: ms.port, - transports: vec![ProviderTransport { - port: ms.port, - .. ProviderTransport::default() - }], - .. 
ProviderInfo::default() - }; - - Ok(()) -} - -#[given(expr = "a provider is started that returns the response from interaction {int}, with the following changes:")] -#[allow(deprecated)] -async fn a_provider_is_started_that_returns_the_response_from_interaction_with_the_following_changes( - world: &mut ProviderWorld, - step: &Step, - num: usize -) -> anyhow::Result<()> { - let mut interaction = world.interactions.get(num - 1).unwrap().clone(); - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - match field.as_str() { - "status" => interaction.response.status = value.parse().unwrap(), - "headers" => { - let headers = interaction.response.headers_mut(); - let headers_to_add = value.split(",") - .map(|header| { - let key_value = header.strip_prefix("'").unwrap_or(header) - .strip_suffix("'").unwrap_or(header) - .splitn(2, ":") - .map(|v| v.trim()) - .collect::>(); - (key_value[0].to_string(), parse_header(key_value[0], key_value[1])) - }); - for (k, v) in headers_to_add { - match headers.entry(k) { - Entry::Occupied(mut entry) => { - entry.get_mut().extend_from_slice(&v); - } - Entry::Vacant(entry) => { - entry.insert(v); - } - } - } - }, - "body" => { - setup_body(value, &mut interaction.response, None); - }, - _ => {} - } - } - } - } - - let pact = RequestResponsePact { - consumer: Consumer { name: "v1-compatibility-suite-c".to_string() }, - provider: Provider { name: "p".to_string() }, - interactions: vec![interaction], - specification_version: world.spec_version, - .. RequestResponsePact::default() - }; - world.provider_key = Uuid::new_v4().to_string(); - let config = MockServerConfig { - pact_specification: world.spec_version, - .. MockServerConfig::default() - }; - let (mock_server, future) = MockServer::new( - world.provider_key.clone(), pact.boxed(), "[::1]:0".parse()?, config - ).await.map_err(|err| anyhow!(err))?; - tokio::spawn(future); - world.provider_server = mock_server; - - let ms = world.provider_server.lock().unwrap(); - world.provider_info = ProviderInfo { - name: "p".to_string(), - host: "[::1]".to_string(), - port: ms.port, - transports: vec![ProviderTransport { - port: ms.port, - .. ProviderTransport::default() - }], - .. ProviderInfo::default() - }; - - Ok(()) -} - -#[given(expr = "a Pact file for interaction {int} is to be verified")] -fn a_pact_file_for_interaction_is_to_be_verified(world: &mut ProviderWorld, num: usize) -> anyhow::Result<()> { - let pact = RequestResponsePact { - consumer: Consumer { name: format!("c_{}", num) }, - provider: Provider { name: "p".to_string() }, - interactions: vec![ world.interactions.get(num - 1).unwrap().clone() ], - specification_version: world.spec_version, - .. 
RequestResponsePact::default() - }; - world.sources.push(PactSource::String(pact.to_json(world.spec_version)?.to_string())); - Ok(()) -} - -#[given(expr = "a Pact file for interaction {int} is to be verified with a provider state {string} defined")] -fn a_pact_file_for_interaction_is_to_be_verified_with_a_provider_state( - world: &mut ProviderWorld, - num: usize, - state: String -) -> anyhow::Result<()> { - let mut interaction = world.interactions.get(num - 1).unwrap().clone(); - interaction.provider_states.push(ProviderState { - name: state, - params: Default::default(), - }); - let pact = RequestResponsePact { - consumer: Consumer { name: format!("c_{}", num) }, - provider: Provider { name: "p".to_string() }, - interactions: vec![interaction], - specification_version: world.spec_version, - .. RequestResponsePact::default() - }; - world.sources.push(PactSource::String(pact.to_json(world.spec_version)?.to_string())); - Ok(()) -} - -#[given(expr = "a Pact file for interaction {int} is to be verified with the following provider states defined:")] -fn a_pact_file_for_interaction_is_to_be_verified_with_the_following_provider_states_defined( - world: &mut ProviderWorld, - step: &Step, - num: usize -) -> anyhow::Result<()> { - let mut interaction = world.interactions.get(num - 1).unwrap().clone(); - - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap().iter() - .enumerate() - .map(|(index, h)| (index, h.clone())) - .collect::>(); - for values in table.rows.iter().skip(1) { - let data = values.iter().enumerate() - .map(|(index, v)| (headers.get(&index).unwrap().as_str(), v.clone())) - .collect::>(); - if let Some(parameters) = data.get("Parameters") { - let json: Value = serde_json::from_str(parameters.as_str()).unwrap(); - interaction.provider_states.push(ProviderState { - name: data.get("State Name").unwrap().clone(), - params: json.as_object().unwrap().iter().map(|(k, v)| (k.clone(), v.clone())).collect() - }); - } else { - interaction.provider_states.push(ProviderState { - name: data.get("State Name").unwrap().clone(), - params: Default::default(), - }); - } - } - } else { - return Err(anyhow!("No data table defined")); - } - - let pact = RequestResponsePact { - consumer: Consumer { name: format!("c_{}", num) }, - provider: Provider { name: "p".to_string() }, - interactions: vec![interaction], - specification_version: world.spec_version, - .. 
RequestResponsePact::default() - }; - world.sources.push(PactSource::String(pact.to_json(world.spec_version)?.to_string())); - Ok(()) -} - -#[when("the verification is run")] -async fn the_verification_is_run(world: &mut ProviderWorld) -> anyhow::Result<()> { - let options = world.verification_options(); - world.verification_results = verify_provider_async( - world.provider_info.clone(), - world.sources.clone(), - FilterInfo::None, - vec![], - &options, - world.publish_options.as_ref(), - &world.provider_state_executor, - None - ).await?; - Ok(()) -} - -#[then("the verification will be successful")] -fn the_verification_will_be_successful(world: &mut ProviderWorld) -> anyhow::Result<()> { - if world.verification_results.result { - Ok(()) - } else { - Err(anyhow!("Verification failed")) - } -} - -#[given(expr = "a provider is started that returns the responses from interactions {string}")] -#[allow(deprecated)] -async fn a_provider_is_started_that_returns_the_responses_from_interactions( - world: &mut ProviderWorld, - ids: String -) -> anyhow::Result<()> { - let interactions = ids.split(",") - .map(|id| id.trim().parse::().unwrap()) - .map(|index| world.interactions.get(index - 1).unwrap().clone()) - .collect(); - let pact = RequestResponsePact { - consumer: Consumer { name: "v1-compatibility-suite-c".to_string() }, - provider: Provider { name: "p".to_string() }, - interactions, - specification_version: world.spec_version, - .. RequestResponsePact::default() - }; - world.provider_key = Uuid::new_v4().to_string(); - let config = MockServerConfig { - pact_specification: world.spec_version, - .. MockServerConfig::default() - }; - let (mock_server, future) = MockServer::new( - world.provider_key.clone(), pact.boxed(), "[::1]:0".parse()?, config - ).await.map_err(|err| anyhow!(err))?; - tokio::spawn(future); - world.provider_server = mock_server; - - let ms = world.provider_server.lock().unwrap(); - world.provider_info = ProviderInfo { - name: "p".to_string(), - host: "[::1]".to_string(), - port: ms.port, - transports: vec![ProviderTransport { - port: ms.port, - .. ProviderTransport::default() - }], - .. ProviderInfo::default() - }; - Ok(()) -} - -#[then("the verification will NOT be successful")] -fn the_verification_will_not_be_successful(world: &mut ProviderWorld) -> anyhow::Result<()> { - if world.verification_results.result { - Err(anyhow!("Was expecting the verification to fail")) - } else { - Ok(()) - } -} - -#[then(expr = "the verification results will contain a {string} error")] -fn the_verification_results_will_contain_a_error(world: &mut ProviderWorld, err: String) -> anyhow::Result<()> { - if world.verification_results.errors.iter().any(|(_, r)| { - match r { - VerificationMismatchResult::Mismatches { mismatches, .. } => { - mismatches.iter().any(|mismatch| { - match mismatch { - Mismatch::MethodMismatch { .. } => false, - Mismatch::PathMismatch { .. } => false, - Mismatch::StatusMismatch { .. } => err == "Response status did not match", - Mismatch::QueryMismatch { .. } => false, - Mismatch::HeaderMismatch { .. } => err == "Headers had differences", - Mismatch::BodyTypeMismatch { .. } => false, - Mismatch::BodyMismatch { .. } => err == "Body had differences", - Mismatch::MetadataMismatch { .. } => false - } - }) - } - VerificationMismatchResult::Error { error, .. 
} => match err.as_str() { - "State change request failed" => error == "One or more of the setup state change handlers has failed", - _ => error.as_str() == err - } - } - }) { - Ok(()) - } else { - Err(anyhow!("Did not find error message in verification results")) - } -} - -#[given(expr = "a Pact file for interaction {int} is to be verified from a Pact broker")] -async fn a_pact_file_for_interaction_is_to_be_verified_from_a_pact_broker( - world: &mut ProviderWorld, - num: usize -) -> anyhow::Result<()> { - let interaction = world.interactions.get(num - 1).unwrap().clone(); - let pact = RequestResponsePact { - consumer: Consumer { name: format!("c_{}", num) }, - provider: Provider { name: "p".to_string() }, - interactions: vec![interaction.clone()], - specification_version: world.spec_version, - .. RequestResponsePact::default() - }; - let mut pact_json = pact.to_json(world.spec_version)?; - let pact_json_inner = pact_json.as_object_mut().unwrap(); - pact_json_inner.insert("_links".to_string(), json!({ - "pb:publish-verification-results": { - "title": "Publish verification results", - "href": format!("http://localhost:1234/pacts/provider/p/consumer/c_{}/verification-results", num) - } - })); - let interactions_json = pact_json_inner.get_mut("interactions").unwrap().as_array_mut().unwrap(); - let interaction_json = interactions_json.get_mut(0).unwrap().as_object_mut().unwrap(); - interaction_json.insert("_id".to_string(), json!(interaction.id.unwrap())); - - let f = PathBuf::from(format!("pact-compatibility-suite/fixtures/pact-broker_c{}.json", num)); - let mut broker_pact = read_pact(&*f) - .expect(format!("could not load fixture 'pact-broker_c{}.json'", num).as_str()) - .as_request_response_pact().unwrap(); - - // AAARGH! My head. Adding a Pact Interaction to a Pact file for fetching a Pact file for verification - let matching_rules = matchingrules! { - "body" => { "$._links.pb:publish-verification-results.href" => [ - MatchingRule::Regex(format!(".*(\\/pacts\\/provider\\/p\\/consumer\\/c_{}\\/verification-results)", num)) - ] } - }; - let generators = generators! { - "BODY" => { - "$._links.pb:publish-verification-results.href" => Generator::MockServerURL( - format!("http://localhost:1234/pacts/provider/p/consumer/c_{}/verification-results", num), - format!(".*(\\/pacts\\/provider\\/p\\/consumer\\/c_{}\\/verification-results)", num) - ) - } - }; - let interaction = RequestResponseInteraction { - request: Request { - path: format!("/pacts/provider/p/consumer/c_{}", num), - .. Request::default() - }, - response: Response { - headers: Some(hashmap!{ - "content-type".to_string() => vec![ "application/json".to_string() ] - }), - body: OptionalBody::Present(Bytes::from(pact_json.to_string()), - Some(JSON.clone()), None), - matching_rules, - generators, - .. Response::default() - }, - .. RequestResponseInteraction::default() - }; - broker_pact.interactions.push(interaction); - - let config = MockServerConfig { - .. 
MockServerConfig::default() - }; - let (mock_server, future) = MockServer::new( - Uuid::new_v4().to_string(), broker_pact.boxed(), "127.0.0.1:0".parse()?, config - ).await.map_err(|err| anyhow!(err))?; - tokio::spawn(future); - let broker_port = { - let ms = mock_server.lock().unwrap(); - ms.port - }; - world.mock_brokers.push(mock_server); - - world.sources.push(PactSource::BrokerWithDynamicConfiguration { - provider_name: "p".to_string(), - broker_url: format!("http://localhost:{}", broker_port.unwrap()), - enable_pending: false, - include_wip_pacts_since: None, - provider_tags: vec![], - provider_branch: None, - selectors: vec![], - auth: None, - links: vec![], - }); - Ok(()) -} - -#[then("a verification result will NOT be published back")] -fn a_verification_result_will_not_be_published_back(world: &mut ProviderWorld) -> anyhow::Result<()> { - let verification_results = world.mock_brokers.iter().any(|broker| { - let ms = broker.lock().unwrap(); - let verification_requests = ms.metrics.requests_by_path.iter() - .find(|(path, _)| { - path.ends_with("/verification-results") - }) - .map(|(_, count)| *count) - .unwrap_or(0); - verification_requests > 0 - }); - if verification_results { - Err(anyhow!("Was expecting no verification results")) - } else { - Ok(()) - } -} - -#[given("publishing of verification results is enabled")] -fn publishing_of_verification_results_is_enabled(world: &mut ProviderWorld) { - world.publish_options = Some(PublishOptions { - provider_version: Some("1.2.3".to_string()), - build_url: None, - provider_tags: vec![], - provider_branch: None, - }); -} - -#[then(expr = "a successful verification result will be published back for interaction \\{{int}}")] -fn a_successful_verification_result_will_be_published_back_for_interaction(world: &mut ProviderWorld, num: usize) -> anyhow::Result<()> { - let verification_results = world.mock_brokers.iter().any(|broker| { - let ms = broker.lock().unwrap(); - let vec = ms.matches(); - let verification_request = vec.iter() - .find(|result| { - let expected_path = format!("/pacts/provider/p/consumer/c_{}/verification-results", num); - match result { - MatchResult::RequestMatch(req, _, _) => req.path == expected_path, - MatchResult::RequestMismatch(req, _, _) => req.path == expected_path, - MatchResult::RequestNotFound(req) => req.path == expected_path, - MatchResult::MissingRequest(req) => req.path == expected_path - } - }); - if let Some(result) = verification_request { - match result { - MatchResult::RequestMatch(req, _, _) => if let Some(body) = req.body.value() { - if let Ok(json) = serde_json::from_slice::(body.as_ref()) { - if let Some(success) = json.get("success") { - match success { - Value::Bool(b) => *b, - _ => false - } - } else { - false - } - } else { - false - } - } else { - false - }, - _ => false - } - } else { - false - } - }); - if verification_results { - Ok(()) - } else { - Err(anyhow!("Either no verification results was published, or it was incorrect")) - } -} - -#[then(expr = "a failed verification result will be published back for the interaction \\{{int}}")] -fn a_failed_verification_result_will_be_published_back_for_the_interaction(world: &mut ProviderWorld, num: usize) -> anyhow::Result<()> { - let verification_results = world.mock_brokers.iter().any(|broker| { - let ms = broker.lock().unwrap(); - let vec = ms.matches(); - let verification_request = vec.iter() - .find(|result| { - let expected_path = format!("/pacts/provider/p/consumer/c_{}/verification-results", num); - match result { - 
MatchResult::RequestMatch(req, _, _) => req.path == expected_path, - MatchResult::RequestMismatch(req, _, _) => req.path == expected_path, - MatchResult::RequestNotFound(req) => req.path == expected_path, - MatchResult::MissingRequest(req) => req.path == expected_path - } - }); - if let Some(result) = verification_request { - match result { - MatchResult::RequestMatch(req, _, _) => if let Some(body) = req.body.value() { - if let Ok(json) = serde_json::from_slice::(body.as_ref()) { - if let Some(success) = json.get("success") { - match success { - Value::Bool(b) => !*b, - _ => false - } - } else { - false - } - } else { - false - } - } else { - false - }, - _ => false - } - } else { - false - } - }); - if verification_results { - Ok(()) - } else { - Err(anyhow!("Either no verification results was published, or it was incorrect")) - } -} - -#[given("a provider state callback is configured")] -fn a_provider_state_callback_is_configured(world: &mut ProviderWorld) -> anyhow::Result<()> { - world.provider_state_executor.set_fail_mode(false); - Ok(()) -} - -#[given("a provider state callback is configured, but will return a failure")] -fn a_provider_state_callback_is_configured_but_will_return_a_failure(world: &mut ProviderWorld) -> anyhow::Result<()> { - world.provider_state_executor.set_fail_mode(true); - Ok(()) -} - -#[then("the provider state callback will be called before the verification is run")] -fn the_provider_state_callback_will_be_called_before_the_verification_is_run(world: &mut ProviderWorld) -> anyhow::Result<()> { - if world.provider_state_executor.was_called(true) { - Ok(()) - } else { - Err(anyhow!("Provider state callback was not called")) - } -} - -#[then(expr = "the provider state callback will receive a setup call with {string} as the provider state parameter")] -fn the_provider_state_callback_will_receive_a_setup_call_with_as_the_provider_state_parameter( - world: &mut ProviderWorld, - state: String -) -> anyhow::Result<()> { - if world.provider_state_executor.was_called_for_state(state.as_str(), true) { - Ok(()) - } else { - Err(anyhow!("Provider state callback was not called for state '{}'", state)) - } -} - -#[then(expr = "the provider state callback will receive a setup call with {string} and the following parameters:")] -fn the_provider_state_callback_will_receive_a_setup_call_with_and_the_following_parameters( - world: &mut ProviderWorld, - step: &Step, - state: String -) -> anyhow::Result<()> { - validate_state_call(world, step, state, true) -} - -#[then(expr = "the provider state callback will receive a teardown call {string} and the following parameters:")] -fn the_provider_state_callback_will_receive_a_teardown_call_with_and_the_following_parameters( - world: &mut ProviderWorld, - step: &Step, - state: String -) -> anyhow::Result<()> { - validate_state_call(world, step, state, false) -} - -fn validate_state_call(world: &mut ProviderWorld, step: &Step, state: String, is_setup: bool) -> anyhow::Result<()> { - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap().iter() - .enumerate() - .map(|(index, h)| (index, h.clone())) - .collect::>(); - if let Some(values) = table.rows.get(1) { - let parameters = values.iter().enumerate() - .map(|(index, v)| { - let key = headers.get(&index).unwrap(); - let value = serde_json::from_str(v).unwrap(); - (key.clone(), value) - }) - .collect::>(); - if world.provider_state_executor.was_called_for_state_with_params(state.as_str(), ¶meters, is_setup) { - Ok(()) - } else { - Err(anyhow!("Provider state 
callback was not called for state '{}' with params {:?}", state, parameters))
-      }
-    } else {
-      Err(anyhow!("No data table defined"))
-    }
-  } else {
-    Err(anyhow!("No data table defined"))
-  }
-}
-
-#[then("the provider state callback will be called after the verification is run")]
-fn the_provider_state_callback_will_be_called_after_the_verification_is_run(world: &mut ProviderWorld) -> anyhow::Result<()> {
-  if world.provider_state_executor.was_called(false) {
-    Ok(())
-  } else {
-    Err(anyhow!("Provider state callback teardown was not called"))
-  }
-}
-
-#[then(expr = "the provider state callback will receive a teardown call {string} as the provider state parameter")]
-fn the_provider_state_callback_will_receive_a_teardown_call_as_the_provider_state_parameter(
-  world: &mut ProviderWorld,
-  state: String
-) -> anyhow::Result<()> {
-  if world.provider_state_executor.was_called_for_state(state.as_str(), false) {
-    Ok(())
-  } else {
-    Err(anyhow!("Provider state teardown callback was not called for state '{}'", state))
-  }
-}
-
-#[then("the provider state callback will NOT receive a teardown call")]
-fn the_provider_state_callback_will_not_receive_a_teardown_call(world: &mut ProviderWorld) -> anyhow::Result<()> {
-  if world.provider_state_executor.was_called(false) {
-    Err(anyhow!("Provider state callback teardown was called but was expecting no call"))
-  } else {
-    Ok(())
-  }
-}
-
-#[then(expr = "a warning will be displayed that there was no provider state callback configured for provider state {string}")]
-fn a_warning_will_be_displayed_that_there_was_no_provider_state_callback_configured(
-  _world: &mut ProviderWorld,
-  _state: String
-) -> anyhow::Result<()> {
-  // Unable to verify this, as the default provider state callback handler displays this message,
-  // and this has been overwritten for the test suite. The verifier will not display it.
- Ok(()) -} - -#[given("a request filter is configured to make the following changes:")] -fn a_request_filter_is_configured_to_make_the_following_changes( - world: &mut ProviderWorld, - step: &Step -) -> anyhow::Result<()> { - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap().iter() - .enumerate() - .map(|(index, h)| (index, h.clone())) - .collect::>(); - if let Some(values) = table.rows.get(1) { - world.request_filter_data = values.iter().enumerate() - .map(|(index, v)| (headers.get(&index).cloned(), v.clone())) - .filter_map(|(k, v)| k.map(|k| (k.clone(), v.clone()))) - .collect(); - Ok(()) - } else { - Err(anyhow!("No data table defined")) - } - } else { - Err(anyhow!("No data table defined")) - } -} - -#[then(expr = "the request to the provider will contain the header {string}")] -fn the_request_to_the_provider_will_contain_the_header( - world: &mut ProviderWorld, - header: String -) -> anyhow::Result<()> { - let header = header.splitn(2, ':') - .map(|s| s.trim()) - .collect_vec(); - let matches = { - let guard = world.provider_server.lock().unwrap(); - guard.matches() - }; - if matches.iter().all(|m| { - let req = match m { - MatchResult::RequestMatch(_, _, req) => req, - MatchResult::RequestMismatch(_, req, _) => req, - MatchResult::RequestNotFound(req) => req, - MatchResult::MissingRequest(req) => req - }; - if let Some(headers) = &req.headers { - let key = header[0].to_lowercase(); - headers.contains_key(key.as_str()) && headers.get(key.as_str()).unwrap()[0] == header[1] - } else { - false - } - }) { - Ok(()) - } else { - Err(anyhow!("Not all request to the provider contained the required header")) - } -} diff --git a/compatibility-suite/tests/v1_consumer.rs b/compatibility-suite/tests/v1_consumer.rs deleted file mode 100644 index ad97f3227..000000000 --- a/compatibility-suite/tests/v1_consumer.rs +++ /dev/null @@ -1,26 +0,0 @@ -use cucumber::World; -use tracing_subscriber::EnvFilter; - -use crate::shared_steps::consumer::ConsumerWorld; - -pub mod shared_steps; - -#[tokio::main] -async fn main() { - let format = tracing_subscriber::fmt::format().pretty(); - tracing_subscriber::fmt() - .with_env_filter(EnvFilter::from_default_env()) - .event_format(format) - .init(); - - ConsumerWorld::cucumber() - .fail_on_skipped() - .before(|_feature, _, scenario, world| Box::pin(async move { - world.scenario_id = scenario.name.clone(); - })) - .filter_run_and_exit("pact-compatibility-suite/features/V1", |feature, _rule, scenario| { - feature.tags.iter().any(|tag| tag == "consumer") && - !scenario.tags.iter().any(|t| t == "wip") - }) - .await; -} diff --git a/compatibility-suite/tests/v1_provider.rs b/compatibility-suite/tests/v1_provider.rs deleted file mode 100644 index efeb698a9..000000000 --- a/compatibility-suite/tests/v1_provider.rs +++ /dev/null @@ -1,36 +0,0 @@ -use cucumber::World; -use tracing_subscriber::EnvFilter; - -use crate::shared_steps::provider::ProviderWorld; - -pub mod shared_steps; - -#[tokio::main] -async fn main() { - let format = tracing_subscriber::fmt::format().pretty(); - tracing_subscriber::fmt() - .with_env_filter(EnvFilter::from_default_env()) - .event_format(format) - .init(); - - ProviderWorld::cucumber() - .fail_on_skipped() - .max_concurrent_scenarios(1) - .after(|_feature, _, _scenario, _status, world| Box::pin(async move { - if let Some(world) = world { - { - let mut ms = world.provider_server.lock().unwrap(); - let _ = ms.shutdown(); - } - for broker in &world.mock_brokers { - let mut ms = broker.lock().unwrap(); - let _ = 
ms.shutdown(); - } - } - })) - .filter_run_and_exit("pact-compatibility-suite/features/V1", |feature, _rule, scenario| { - feature.tags.iter().any(|tag| tag == "provider") && - !scenario.tags.iter().any(|t| t == "wip") - }) - .await; -} diff --git a/compatibility-suite/tests/v2_consumer.rs b/compatibility-suite/tests/v2_consumer.rs deleted file mode 100644 index 59336abc1..000000000 --- a/compatibility-suite/tests/v2_consumer.rs +++ /dev/null @@ -1,26 +0,0 @@ -use cucumber::World; -use tracing_subscriber::EnvFilter; - -use crate::shared_steps::consumer::ConsumerWorld; - -pub mod shared_steps; - -#[tokio::main] -async fn main() { - let format = tracing_subscriber::fmt::format().pretty(); - tracing_subscriber::fmt() - .with_env_filter(EnvFilter::from_default_env()) - .event_format(format) - .init(); - - ConsumerWorld::cucumber() - .fail_on_skipped() - .before(|_feature, _, scenario, world| Box::pin(async move { - world.scenario_id = scenario.name.clone(); - })) - .filter_run_and_exit("pact-compatibility-suite/features/V2", |feature, _rule, scenario| { - feature.tags.iter().any(|tag| tag == "consumer") && - !scenario.tags.iter().any(|t| t == "wip") - }) - .await; -} diff --git a/compatibility-suite/tests/v2_provider.rs b/compatibility-suite/tests/v2_provider.rs deleted file mode 100644 index 31ced349e..000000000 --- a/compatibility-suite/tests/v2_provider.rs +++ /dev/null @@ -1,36 +0,0 @@ -use cucumber::World; -use tracing_subscriber::EnvFilter; - -use crate::shared_steps::provider::ProviderWorld; - -pub mod shared_steps; - -#[tokio::main] -async fn main() { - let format = tracing_subscriber::fmt::format().pretty(); - tracing_subscriber::fmt() - .with_env_filter(EnvFilter::from_default_env()) - .event_format(format) - .init(); - - ProviderWorld::cucumber() - .fail_on_skipped() - .max_concurrent_scenarios(1) - .after(|_feature, _, _scenario, _status, world| Box::pin(async move { - if let Some(world) = world { - { - let mut ms = world.provider_server.lock().unwrap(); - let _ = ms.shutdown(); - } - for broker in &world.mock_brokers { - let mut ms = broker.lock().unwrap(); - let _ = ms.shutdown(); - } - } - })) - .filter_run_and_exit("pact-compatibility-suite/features/V2", |feature, _rule, scenario| { - feature.tags.iter().any(|tag| tag == "provider") && - !scenario.tags.iter().any(|t| t == "wip") - }) - .await; -} diff --git a/compatibility-suite/tests/v3.rs b/compatibility-suite/tests/v3.rs deleted file mode 100644 index 4d6f5d107..000000000 --- a/compatibility-suite/tests/v3.rs +++ /dev/null @@ -1,24 +0,0 @@ -use cucumber::World; -use tracing_subscriber::EnvFilter; - -use crate::v3_steps::V3World; - -mod shared_steps; -mod v3_steps; - -#[tokio::main] -async fn main() { - let format = tracing_subscriber::fmt::format().pretty(); - tracing_subscriber::fmt() - .with_env_filter(EnvFilter::from_default_env()) - .event_format(format) - .init(); - - V3World::cucumber() - .fail_on_skipped() - .filter_run_and_exit("pact-compatibility-suite/features/V3", |feature, _rule, scenario| { - feature.tags.iter().all(|tag| tag != "provider" && tag != "message") && - !scenario.tags.iter().any(|t| t == "wip") - }) - .await; -} diff --git a/compatibility-suite/tests/v3_message.rs b/compatibility-suite/tests/v3_message.rs deleted file mode 100644 index 30027c665..000000000 --- a/compatibility-suite/tests/v3_message.rs +++ /dev/null @@ -1,96 +0,0 @@ -use base64::Engine; -use base64::engine::general_purpose::STANDARD as BASE64; -use cucumber::World; -use itertools::Itertools; -use rocket::http::{ContentType, Header}; 
-use rocket::Responder; -use rocket::serde::json::Json; -use serde::Deserialize; -use serde_json::json; -use tracing::debug; -use tracing_subscriber::EnvFilter; - -use v3_steps::message::V3MessageWorld; - -use crate::v3_steps::message::MESSAGES; - -mod shared_steps; -mod v3_steps; - -#[derive(Deserialize, Default, Debug)] -struct MessageDetails { - description: String -} - -#[derive(Responder)] -struct MessageResponder<'a> { - payload: Option>, - content_type: ContentType, - metadata: Header<'a> -} - -#[rocket::post("/", data = "")] -async fn messages(request: Json) -> Option> { - let details = request.into_inner(); - debug!("Got request = {:?}", details); - let guard = MESSAGES.lock().unwrap(); - guard.get(details.description.as_str()) - .map(|message| { - let metadata = json!(message.metadata).to_string(); - MessageResponder { - payload: message.contents.value().map(|data| data.to_vec()), - content_type: message.message_content_type() - .map(|ct| ContentType::parse_flexible(ct.to_string().as_str())) - .flatten() - .unwrap_or(ContentType::Plain), - metadata: Header::new("pact-message-metadata", BASE64.encode(metadata)) - } - }) -} - -#[tokio::main(flavor = "multi_thread", worker_threads = 4)] -async fn main() { - let format = tracing_subscriber::fmt::format().pretty(); - tracing_subscriber::fmt() - .with_env_filter(EnvFilter::from_default_env()) - .event_format(format) - .init(); - - let server = rocket::build() - .mount("/", rocket::routes![messages]) - .ignite() - .await.expect("Could not start the Rocket server"); - let shutdown = server.shutdown(); - let port = server.config().port; - tokio::spawn(server.launch()); - - V3MessageWorld::cucumber() - .fail_on_skipped() - .max_concurrent_scenarios(1) - .before(move |_, _, scenario, world| { - Box::pin(async move { - world.scenario_id = scenario.name.clone(); - world.message_proxy_port = port; - }) - }) - .after(|_feature, _, _scenario, _status, world| { - Box::pin(async move { - if let Some(world) = world { - let mut guard = MESSAGES.lock().unwrap(); - let keys = guard.keys().cloned().collect_vec(); - for key in keys { - if key.starts_with(world.scenario_id.as_str()) { - guard.remove(key.as_str()); - } - } - } - }) - }) - .filter_run_and_exit("pact-compatibility-suite/features/V3", |feature, _rule, scenario| { - feature.tags.iter().any(|tag| tag == "message") && - !scenario.tags.iter().any(|t| t == "wip") - }) - .await; - - shutdown.notify(); -} diff --git a/compatibility-suite/tests/v3_provider.rs b/compatibility-suite/tests/v3_provider.rs deleted file mode 100644 index 1ca8db8c6..000000000 --- a/compatibility-suite/tests/v3_provider.rs +++ /dev/null @@ -1,40 +0,0 @@ -use cucumber::World; -use pact_models::PactSpecification; -use tracing_subscriber::EnvFilter; - -use crate::shared_steps::provider::ProviderWorld; - -pub mod shared_steps; - -#[tokio::main] -async fn main() { - let format = tracing_subscriber::fmt::format().pretty(); - tracing_subscriber::fmt() - .with_env_filter(EnvFilter::from_default_env()) - .event_format(format) - .init(); - - ProviderWorld::cucumber() - .fail_on_skipped() - .max_concurrent_scenarios(1) - .before(|_, _, _, world| Box::pin(async move { - world.spec_version = PactSpecification::V3; - })) - .after(|_feature, _, _scenario, _status, world| Box::pin(async move { - if let Some(world) = world { - { - let mut ms = world.provider_server.lock().unwrap(); - let _ = ms.shutdown(); - } - for broker in &world.mock_brokers { - let mut ms = broker.lock().unwrap(); - let _ = ms.shutdown(); - } - } - })) - 
.filter_run_and_exit("pact-compatibility-suite/features/V3", |feature, _rule, scenario| { - feature.tags.iter().any(|tag| tag == "provider") && - !scenario.tags.iter().any(|t| t == "wip") - }) - .await; -} diff --git a/compatibility-suite/tests/v3_steps/generators.rs b/compatibility-suite/tests/v3_steps/generators.rs deleted file mode 100644 index 43c9f6c17..000000000 --- a/compatibility-suite/tests/v3_steps/generators.rs +++ /dev/null @@ -1,329 +0,0 @@ -use std::collections::HashMap; -use std::fs::File; -use std::io::BufReader; - -use anyhow::anyhow; -use cucumber::{given, then, when}; -use cucumber::gherkin::Step; -use maplit::hashmap; -use pact_models::generators::{Generators, GeneratorTestMode}; -use pact_models::json_utils::json_to_string; -use pact_models::path_exp::DocPath; -use pact_models::request::Request; -use pact_models::response::Response; -use regex::Regex; -use serde_json::Value; - -use pact_matching::{generate_request, generate_response}; - -use crate::shared_steps::{assert_value_type, setup_body}; -use crate::v3_steps::V3World; - -#[given(expr = "a request configured with the following generators:")] -fn a_request_configured_with_the_following_generators(world: &mut V3World, step: &Step) { - let mut request = Request { - path: "/path/one".to_string(), - .. Request::default() - }; - - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - match field.as_str() { - "body" => setup_body(value, &mut request, None), - "generators" => { - let json: Value = if value.starts_with("JSON:") { - serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value).trim()).unwrap() - } else { - let f = File::open(format!("pact-compatibility-suite/fixtures/{}", value)) - .expect(format!("could not load fixture '{}'", value).as_str()); - let reader = BufReader::new(f); - serde_json::from_reader(reader).unwrap() - }; - let mut generators = Generators::default(); - generators.load_from_map(json.as_object().unwrap()).unwrap(); - request.generators = generators; - } - _ => {} - } - } - } - } - - world.original_body = request.body.clone(); - world.request = request; -} - -#[given(expr = "a response configured with the following generators:")] -fn a_response_configured_with_the_following_generators(world: &mut V3World, step: &Step) { - let mut response = Response::default(); - - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - match field.as_str() { - "body" => setup_body(value, &mut response, None), - "generators" => { - let json: Value = if value.starts_with("JSON:") { - serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value).trim()).unwrap() - } else { - let f = File::open(format!("pact-compatibility-suite/fixtures/{}", value)) - .expect(format!("could not load fixture '{}'", value).as_str()); - let reader = BufReader::new(f); - serde_json::from_reader(reader).unwrap() - }; - let mut generators = Generators::default(); - generators.load_from_map(json.as_object().unwrap()).unwrap(); - response.generators = generators; - } - _ => {} - } - } - } - } - - world.original_body = response.body.clone(); - world.response = response; -} - -#[given(expr = "the generator test mode is set as {string}")] -fn the_generator_test_mode_is_set_as(world: &mut V3World, mode: String) { - 
world.generator_test_mode = if mode == "Consumer" { - GeneratorTestMode::Consumer - } else { - GeneratorTestMode::Provider - }; -} - -#[when("the request is prepared for use")] -async fn the_request_prepared_for_use(world: &mut V3World) { - let context = world.generator_context.iter() - .map(|(k, v)| (k.as_str(), v.clone())) - .collect(); - world.generated_request = generate_request(&world.request.as_v4_request(), - &world.generator_test_mode, &context).await.as_v3_request(); - world.generated_body = world.generated_request.body.clone(); -} - -#[when("the response is prepared for use")] -async fn the_response_is_prepared_for_use(world: &mut V3World) { - let context = world.generator_context.iter() - .map(|(k, v)| (k.as_str(), v.clone())) - .collect(); - world.generated_response = generate_response(&world.response.as_v4_response(), - &world.generator_test_mode, &context).await.as_v3_response(); - world.generated_body = world.generated_response.body.clone(); -} - -#[when(expr = "the request is prepared for use with a {string} context:")] -async fn the_request_is_prepared_for_use_with_a_context( - world: &mut V3World, - step: &Step, - context_field: String -) { - // FUCK! That is all I can say about this at this point. - let context = if let Some(table) = step.table.as_ref() { - let value = table.rows.first().unwrap().first().unwrap(); - let json: Value = serde_json::from_str(value).unwrap(); - let attributes = json.as_object().unwrap(); - let map = attributes.iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect::>(); - if context_field == "providerState" { - map - } else if context_field == "mockServer" { - hashmap!{ - context_field.to_string() => Value::Object(map.iter() - .map(|(k, v)| { - if k == "href" { - ("url".to_string(), v.clone()) - } else { - (k.clone(), v.clone()) - } - }).collect()) - } - } else { - hashmap!{ - context_field.to_string() => Value::Object(map.iter() - .map(|(k, v)| (k.clone(), v.clone())).collect()) - } - } - } else { - world.generator_context.clone() - }; - - let context = context.iter() - .map(|(k, v)| (k.as_str(), v.clone())) - .collect::>(); - world.generated_request = generate_request(&world.request.as_v4_request(), - &world.generator_test_mode, &context).await.as_v3_request(); - world.generated_body = world.generated_request.body.clone(); -} - -#[then(expr = "the body value for {string} will have been replaced with a(n) {string}")] -fn the_body_value_for_will_have_been_replaced_with_a_value( - world: &mut V3World, - path: String, - value_type: String -) -> anyhow::Result<()> { - let path = DocPath::new(path).unwrap(); - let original_json: Value = serde_json::from_str(world.original_body.value_as_string().unwrap().as_str()).unwrap(); - let pointer = path.as_json_pointer().unwrap(); - let pointer = pointer.as_str(); - let original_element = original_json.pointer(pointer).unwrap(); - let json: Value = serde_json::from_str(world.generated_body.value_as_string().unwrap().as_str()).unwrap(); - let element = json.pointer(pointer).unwrap(); - - if element == original_element { - return Err(anyhow!("Expected original ({:?}) to have been replaced", original_element)) - } - - assert_value_type(value_type, element) -} - -#[then(expr = "the body value for {string} will have been replaced with {string}")] -fn the_body_value_for_will_have_been_replaced_with_value( - world: &mut V3World, - path: String, - value: String -) -> anyhow::Result<()> { - let path = DocPath::new(path).unwrap(); - let original_json: Value = 
serde_json::from_str(world.original_body.value_as_string().unwrap().as_str()).unwrap(); - let pointer = path.as_json_pointer().unwrap(); - let pointer = pointer.as_str(); - let original_element = original_json.pointer(pointer).unwrap(); - let json: Value = serde_json::from_str(world.generated_body.value_as_string().unwrap().as_str()).unwrap(); - let element = json.pointer(pointer).unwrap(); - - if element == original_element { - Err(anyhow!("Expected original ({:?}) to have been replaced", original_element)) - } else if json_to_string(&element) == value { - Ok(()) - } else { - Err(anyhow!("Expected value ({:?}) to be equal to {}", element, value)) - } -} - -#[then(expr = "the request {string} will be set as {string}")] -fn the_request_will_be_set_as( - world: &mut V3World, - request_part: String, - value: String -) -> anyhow::Result<()> { - match request_part.as_str() { - "path" => { - if world.generated_request.path == value { - Ok(()) - } else { - Err(anyhow!("Expected path to be {} but was {}", value, world.generated_request.path)) - } - } - _ => Err(anyhow!("Invalid HTTP part: {}", request_part)) - } -} - -#[then(expr = "the request {string} will match {string}")] -fn the_request_will_match( - world: &mut V3World, - request_part: String, - regex: String -) -> anyhow::Result<()> { - let regex = Regex::new(regex.as_str()).unwrap(); - let key_regex = Regex::new(r"\[(.*)]").unwrap(); - if request_part.as_str() == "path" { - if regex.is_match(world.generated_request.path.as_str()) { - Ok(()) - } else { - Err(anyhow!("Expected path to match {} but was {}", regex, world.generated_request.path)) - } - } else if request_part.starts_with("header") { - let header = key_regex.captures(request_part.as_str()).unwrap().get(1).unwrap().as_str(); - if let Some(headers) = &world.generated_request.headers { - if let Some(value) = headers.get(header) { - if value.iter().all(|v| regex.is_match(v.as_ref())) { - Ok(()) - } else { - Err(anyhow!("Request header {} has a value that does not match {}", header, regex)) - } - } else { - Err(anyhow!("Request does not have header {} set", header)) - } - } else { - Err(anyhow!("Request does not have any headers set")) - } - } else if request_part.starts_with("queryParameter") { - let parameter = key_regex.captures(request_part.as_str()).unwrap().get(1).unwrap().as_str(); - if let Some(query) = &world.generated_request.query { - if let Some(value) = query.get(parameter) { - if value.iter().all(|v| { - let v = v.as_ref().map(|v| v.as_str()).unwrap_or_default(); - regex.is_match(v) - }) { - Ok(()) - } else { - Err(anyhow!("Request query parameter {} has a value that does not match {}", parameter, regex)) - } - } else { - Err(anyhow!("Request does not have query parameter {} set", parameter)) - } - } else { - Err(anyhow!("Request does not have any query parameters set")) - } - } else { - Err(anyhow!("Invalid HTTP part: {}", request_part)) - } -} - -#[then(expr = "the response {string} will not be {string}")] -fn the_response_will_not_be( - world: &mut V3World, - response_part: String, - value: String -) -> anyhow::Result<()> { - match response_part.as_str() { - "status" => { - if world.generated_response.status != value.parse::().unwrap() { - Ok(()) - } else { - Err(anyhow!("Expected status to be NOT be {} but was", value)) - } - } - _ => Err(anyhow!("Invalid HTTP part: {}", response_part)) - } -} - -#[then(expr = "the response {string} will match {string}")] -fn the_response_will_match( - world: &mut V3World, - response_part: String, - regex: String -) -> 
anyhow::Result<()> { - let regex = Regex::new(regex.as_str()).unwrap(); - let key_regex = Regex::new(r"\[(.*)]").unwrap(); - if response_part.as_str() == "status" { - if regex.is_match(world.generated_response.status.to_string().as_str()) { - Ok(()) - } else { - Err(anyhow!("Expected status to match {} but was {}", regex, world.generated_response.status)) - } - } else if response_part.starts_with("header") { - let header = key_regex.captures(response_part.as_str()).unwrap().get(1).unwrap().as_str(); - if let Some(headers) = &world.generated_response.headers { - if let Some(value) = headers.get(header) { - if value.iter().all(|v| regex.is_match(v.as_ref())) { - Ok(()) - } else { - Err(anyhow!("Response header {} has a value that does not match {}", header, regex)) - } - } else { - Err(anyhow!("Response does not have header {} set", header)) - } - } else { - Err(anyhow!("Response does not have any headers set")) - } - } else { - Err(anyhow!("Invalid HTTP part: {}", response_part)) - } -} \ No newline at end of file diff --git a/compatibility-suite/tests/v3_steps/http_consumer.rs b/compatibility-suite/tests/v3_steps/http_consumer.rs deleted file mode 100644 index 3b12890f6..000000000 --- a/compatibility-suite/tests/v3_steps/http_consumer.rs +++ /dev/null @@ -1,151 +0,0 @@ -use std::collections::HashMap; - -use anyhow::anyhow; -use cucumber::{given, then, when}; -use cucumber::gherkin::Step; -use pact_models::PactSpecification; -use serde_json::{json, Map, Value}; - -use pact_consumer::builders::{InteractionBuilder, PactBuilder}; - -use crate::v3_steps::V3World; - -#[given("an integration is being defined for a consumer test")] -fn an_integration_is_being_defined_for_a_consumer_test(world: &mut V3World) { - world.builder = PactBuilder::new("V3 consumer", "V3 provider"); - world.integration_builder = InteractionBuilder::new("interaction for a consumer test", ""); -} - -#[given(expr = "a provider state {string} is specified")] -fn a_provider_state_is_specified(world: &mut V3World, state: String) { - world.integration_builder.given(state); -} - -#[given(expr = "a provider state {string} is specified with the following data:")] -fn a_provider_state_is_specified_with_the_following_data( - world: &mut V3World, - step: &Step, - state: String -) -> anyhow::Result<()> { - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap().iter() - .enumerate() - .map(|(index, h)| (index, h.clone())) - .collect::>(); - if let Some(value) = table.rows.get(1) { - let data = value.iter().enumerate() - .map(|(index, v)| { - (headers.get(&index).unwrap().clone(), serde_json::from_str(v).unwrap()) - }) - .collect::>(); - world.integration_builder.given_with_params(state, &Value::Object(data)); - } - Ok(()) - } else { - Err(anyhow!("No data table defined")) - } -} - -#[when("the Pact file for the test is generated")] -fn the_pact_file_for_the_test_is_generated(world: &mut V3World) { - world.builder.push_interaction(&world.integration_builder.build()); - world.pact = world.builder.build(); - world.pact_json = world.pact.to_json(PactSpecification::V3).unwrap(); -} - -#[then(expr = "the interaction in the Pact file will contain {int} provider state(s)")] -fn the_interaction_in_the_pact_file_will_contain_provider_states( - world: &mut V3World, - states: usize -) -> anyhow::Result<()> { - let interaction = get_interaction(&world.pact_json, 0)?; - if let Some(provider_states) = interaction.get("providerStates") { - if let Some(provider_states_array) = provider_states.as_array() { - if 
provider_states_array.len() == states { - Ok(()) - } else { - Err(anyhow!("Expected {} provider states, but Pact had {}", states, provider_states_array.len())) - } - } else { - Err(anyhow!("providerStates not valid JSON")) - } - } else { - Err(anyhow!("No providerStates in Interaction JSON")) - } -} - -fn get_interaction(pact_json: &Value, num: usize) -> anyhow::Result { - if let Some(interactions) = pact_json.get("interactions") { - if let Some(interaction) = interactions.get(num) { - Ok(interaction.clone()) - } else { - Err(anyhow!("No interactions in Pact JSON")) - } - } else { - Err(anyhow!("Generated Pact JSON is invalid")) - } -} - -#[then(expr = "the interaction in the Pact file will contain provider state {string}")] -fn the_interaction_in_the_pact_file_will_contain_provider_state( - world: &mut V3World, - state_name: String -) -> anyhow::Result<()> { - let interaction = get_interaction(&world.pact_json, 0)?; - if let Some(provider_states) = interaction.get("providerStates") { - if let Some(provider_states_array) = provider_states.as_array() { - if provider_states_array.iter() - .find(|state| state.get("name").cloned().unwrap_or(Value::Null) == json!(state_name)) - .is_some() { - Ok(()) - } else { - Err(anyhow!("Did not find a provider state with name {}", state_name)) - } - } else { - Err(anyhow!("providerStates not valid JSON")) - } - } else { - Err(anyhow!("No providerStates in Interaction JSON")) - } -} - -#[then(expr = "the provider state {string} in the Pact file will contain the following parameters:")] -fn the_provider_state_in_the_pact_file_will_contain_the_following_parameters( - world: &mut V3World, - step: &Step, - state_name: String -) -> anyhow::Result<()> { - if let Some(table) = step.table.as_ref() { - if let Some(value) = table.rows.get(1) { - let data: Value = serde_json::from_str(value.get(0).unwrap())?; - let interaction = get_interaction(&world.pact_json, 0)?; - if let Some(provider_states) = interaction.get("providerStates") { - if let Some(provider_states_array) = provider_states.as_array() { - if let Some(state) = provider_states_array.iter() - .find(|state| state.get("name").cloned().unwrap_or(Value::Null) == json!(state_name)) { - if let Some(params) = state.get("params") { - if params == &data { - Ok(()) - } else { - Err(anyhow!("Provider state with name {} parameters {} does not equal {}", state_name, - params, data)) - } - } else { - Err(anyhow!("Provider state with name {} has no parameters", state_name)) - } - } else { - Err(anyhow!("Did not find a provider state with name {}", state_name)) - } - } else { - Err(anyhow!("providerStates not valid JSON")) - } - } else { - Err(anyhow!("No providerStates in Interaction JSON")) - } - } else { - Err(anyhow!("No data table defined")) - } - } else { - Err(anyhow!("No data table defined")) - } -} diff --git a/compatibility-suite/tests/v3_steps/http_matching.rs b/compatibility-suite/tests/v3_steps/http_matching.rs deleted file mode 100644 index c5cb5dbc9..000000000 --- a/compatibility-suite/tests/v3_steps/http_matching.rs +++ /dev/null @@ -1,204 +0,0 @@ -use std::collections::hash_map::Entry; -use std::fs::File; -use std::io::BufReader; - -use anyhow::anyhow; -use cucumber::{given, then, when}; -use cucumber::gherkin::Step; -use maplit::hashmap; -use pact_models::headers::parse_header; -use pact_models::http_parts::HttpPart; -use pact_models::interaction::Interaction; -use pact_models::matchingrules::matchers_from_json; -use pact_models::pact::Pact; -use pact_models::prelude::RequestResponseInteraction; -use 
pact_models::request::Request; -use pact_models::sync_pact::RequestResponsePact; -use regex::Regex; -use serde_json::{json, Value}; - -use pact_matching::{match_request, Mismatch}; - -use crate::shared_steps::setup_body; -use crate::v3_steps::V3World; - -#[given(expr = "an expected request with a(n) {string} header of {string}")] -fn an_expected_request_with_a_header_of(world: &mut V3World, header: String, value: String) { - let headers = world.expected_request.headers_mut(); - match headers.entry(header.clone()) { - Entry::Occupied(mut entry) => { - entry.insert(parse_header(header.as_str(), value.as_str())); - } - Entry::Vacant(entry) => { - entry.insert(parse_header(header.as_str(), value.as_str())); - } - } -} - -#[given(expr = "a request is received with a(n) {string} header of {string}")] -fn a_request_is_received_with_a_header_of(world: &mut V3World, header: String, value: String) { - world.received_requests.push(Request { - headers: Some(hashmap!{ header.clone() => parse_header(header.as_str(), value.as_str()) }), - .. Request::default() - }) -} - -#[given(expr = "an expected request configured with the following:")] -fn an_expected_request_configured_with_the_following(world: &mut V3World, step: &Step) { - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - let mut data = hashmap!{}; - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - data.insert(field.as_str(), value); - } - } - - if let Some(body) = data.get("body") { - setup_body(body, &mut world.expected_request, data.get("content type").map(|ct| ct.as_str())); - } - - if let Some(value) = data.get("matching rules") { - let json: Value = if value.starts_with("JSON:") { - serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value).trim()).unwrap() - } else { - let f = File::open(format!("pact-compatibility-suite/fixtures/{}", value)) - .expect(format!("could not load fixture '{}'", value).as_str()); - let reader = BufReader::new(f); - serde_json::from_reader(reader).unwrap() - }; - world.expected_request.matching_rules = matchers_from_json(&json!({ - "matchingRules": json - }), &None) - .expect("Matching rules fixture is not valid JSON"); - } - } -} - -#[given(expr = "a request is received with the following:")] -fn a_request_is_received_with_the_following(world: &mut V3World, step: &Step) { - let mut request = Request::default(); - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - let mut data = hashmap!{}; - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - data.insert(field.as_str(), value); - } - } - - if let Some(body) = data.get("body") { - setup_body(body, &mut request, data.get("content type").map(|ct| ct.as_str())); - } - } - world.received_requests.push(request); -} - -#[given(expr = "the following requests are received:")] -fn the_following_requests_are_received(world: &mut V3World, step: &Step) { - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - for row in table.rows.iter().skip(1) { - let mut request = Request::default(); - for (index, value) in row.iter().enumerate() { - if let Some(field) = headers.get(index) { - match field.as_str() { - "body" => setup_body(value, &mut request, None), - _ => {} - } - } - } - world.received_requests.push(request); - } - } -} - -#[when("the request is compared to the expected one")] -async fn 
the_request_is_compared_to_the_expected_one(world: &mut V3World) { - world.match_result.push( - match_request( - world.expected_request.as_v4_request(), - world.received_requests.first().unwrap().as_v4_request(), - &RequestResponsePact::default().boxed(), - &RequestResponseInteraction::default().boxed() - ).await - ); -} - -#[when("the requests are compared to the expected one")] -async fn the_requests_are_compared_to_the_expected_one(world: &mut V3World) { - for request in &world.received_requests { - world.match_result.push( - match_request( - world.expected_request.as_v4_request(), - request.as_v4_request(), - &RequestResponsePact::default().boxed(), - &RequestResponseInteraction::default().boxed() - ).await - ); - } -} - -#[then("the comparison should be OK")] -fn the_comparison_should_be_ok(world: &mut V3World) -> anyhow::Result<()> { - if world.match_result.iter().all(|result| result.all_matched()) { - Ok(()) - } else { - let count = world.match_result.iter() - .filter_map(|res| { - let mismatches = res.mismatches(); - if mismatches.is_empty() { - None - } else { - Some(mismatches) - } - }) - .flatten() - .collect::>(); - Err(anyhow!("There were match results with mismatches ({:?})", count)) - } -} - -#[then("the comparison should NOT be OK")] -fn the_comparison_should_not_be_ok(world: &mut V3World) -> anyhow::Result<()> { - if world.match_result.iter().all(|result| result.all_matched()) { - Err(anyhow!("All requests matched")) - } else { - Ok(()) - } -} - -#[then(expr = "the mismatches will contain a mismatch with error {string} -> {string}")] -fn the_mismatches_will_contain_a_mismatch_with_error( - world: &mut V3World, - error_path: String, - error: String -) -> anyhow::Result<()> { - if world.match_result.iter().flat_map(|result| result.mismatches()) - .any(|mismatch| { - let path_matches = match &mismatch { - Mismatch::QueryMismatch { parameter, .. } => parameter.as_str() == error_path, - Mismatch::HeaderMismatch { key, .. } => key.as_str() == error_path, - Mismatch::BodyMismatch { path, .. } => path.as_str() == error_path, - Mismatch::MetadataMismatch { key, .. 
} => key.as_str() == error_path, - _ => false - }; - let desc_matches = mismatch.description().contains(error.as_str()); - if path_matches && desc_matches { - true - } else if path_matches { - let desc = mismatch.description(); - if let Ok(re) = Regex::new(error.as_str()) { - re.is_match(desc.as_str()) - } else { - false - } - } else { - false - } - }) { - Ok(()) - } else { - Err(anyhow!("Did not find a mismatch with the required error message")) - } -} diff --git a/compatibility-suite/tests/v3_steps/message.rs b/compatibility-suite/tests/v3_steps/message.rs deleted file mode 100644 index b19b01c62..000000000 --- a/compatibility-suite/tests/v3_steps/message.rs +++ /dev/null @@ -1,913 +0,0 @@ -use std::collections::HashMap; -use std::fs::File; -use std::io::{BufReader, Read}; -use std::panic::catch_unwind; -use std::path::PathBuf; -use std::sync::{Arc, Mutex}; - -use anyhow::anyhow; -use bytes::Bytes; -use cucumber::{given, then, when, World}; -use cucumber::gherkin::Step; -use itertools::Itertools; -use lazy_static::lazy_static; -use maplit::hashmap; -use pact_models::{Consumer, PactSpecification, Provider}; -use pact_models::bodies::OptionalBody; -use pact_models::content_types::{ContentType, JSON, XML}; -use pact_models::generators::Generators; -use pact_models::matchingrules::matchers_from_json; -use pact_models::message::Message; -use pact_models::message_pact::MessagePact; -use pact_models::pact::{Pact, read_pact}; -use pact_models::path_exp::DocPath; -use pact_models::provider_states::ProviderState; -use pact_models::xml_utils::parse_bytes; -use serde_json::{json, Value}; - -use pact_consumer::builders::{MessageInteractionBuilder, PactBuilder}; -use pact_matching::Mismatch; -use pact_verifier::{FilterInfo, NullRequestFilterExecutor, PactSource, ProviderInfo, ProviderTransport, VerificationOptions, verify_provider_async}; -use pact_verifier::verification_result::{VerificationExecutionResult, VerificationMismatchResult}; - -use crate::shared_steps::{assert_value_type, determine_content_type, element_text, IndexType}; -use crate::shared_steps::provider::MockProviderStateExecutor; - -lazy_static!{ - pub static ref MESSAGES: Arc<Mutex<HashMap<String, Message>>> = Arc::new(Mutex::new(hashmap![])); -} - -#[derive(Debug, World)] -pub struct V3MessageWorld { - pub scenario_id: String, - pub builder: PactBuilder, - pub message_builder: MessageInteractionBuilder, - pub received_messages: Vec<Message>, - pub failed: Option<String>, - pub loaded_pact: MessagePact, - pub message_proxy_port: u16, - pub provider_info: ProviderInfo, - pub sources: Vec<PactSource>, - pub provider_state_executor: Arc<MockProviderStateExecutor>, - pub verification_results: VerificationExecutionResult -} - -impl Default for V3MessageWorld { - fn default() -> Self { - V3MessageWorld { - scenario_id: "".to_string(), - builder: PactBuilder::new_v3_message("V3-message-consumer", "V3-message-provider"), - message_builder: MessageInteractionBuilder::new(""), - received_messages: vec![], - failed: None, - loaded_pact: MessagePact::default(), - message_proxy_port: 0, - provider_info: Default::default(), - sources: vec![], - provider_state_executor: Arc::new(Default::default()), - verification_results: VerificationExecutionResult::new(), - } - } -} - -#[given("a message integration is being defined for a consumer test")] -fn a_message_integration_is_being_defined_for_a_consumer_test(world: &mut V3MessageWorld) { - let dir = PathBuf::from("target/compatibility-suite/v3").join(&world.scenario_id); - world.builder.with_output_dir(dir); - world.message_builder = MessageInteractionBuilder::new("a message"); -} -
-#[given(expr = "the message payload contains the {string} JSON document")] -fn the_message_payload_contains_the_json_document( - world: &mut V3MessageWorld, - fixture: String -) -> anyhow::Result<()> { - let mut fixture = File::open(format!("pact-compatibility-suite/fixtures/{}.json", fixture))?; - let mut buffer = Vec::new(); - fixture.read_to_end(&mut buffer)?; - world.message_builder.body(buffer, Some("application/json".into())); - Ok(()) -} - -#[given("a message is defined")] -fn a_message_is_defined(world: &mut V3MessageWorld) { - let previous_builder = world.message_builder.clone(); - world.message_builder = MessageInteractionBuilder::new("a message"); - for state in previous_builder.build().provider_states { - if state.params.is_empty() { - world.message_builder.given(state.name); - } else { - world.message_builder.given_with_params(state.name, &Value::Object(state.params - .iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect() - )); - } - } -} - -#[given("the message is configured with the following:")] -fn the_message_configured_with_the_following(world: &mut V3MessageWorld, step: &Step) { - world.message_builder = MessageInteractionBuilder::new("a message"); - - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - match field.as_str() { - "body" => { - let mut message = Message::default(); - setup_body(value, &mut message); - world.message_builder.message_contents.body = message.contents; - let md = world.message_builder.message_contents.metadata - .get_or_insert_with(|| hashmap!{}); - md.extend(message.metadata.iter().map(|(k, v)| (k.clone(), v.clone()))); - }, - "generators" => { - let json: Value = if value.starts_with("JSON:") { - serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value).trim()).unwrap() - } else { - let f = File::open(format!("pact-compatibility-suite/fixtures/{}", value)) - .expect(format!("could not load fixture '{}'", value).as_str()); - let reader = BufReader::new(f); - serde_json::from_reader(reader).unwrap() - }; - let mut generators = Generators::default(); - generators.load_from_map(json.as_object().unwrap()).unwrap(); - world.message_builder.message_contents.generators = Some(generators); - } - "metadata" => { - let json: Value = serde_json::from_str(value).unwrap(); - let md = world.message_builder.message_contents.metadata - .get_or_insert_with(|| hashmap!{}); - md.extend(json.as_object().unwrap().iter().map(|(k, v)| (k.clone(), v.clone()))) - } - _ => {} - } - } - } - } -} - -#[given("the message contains the following metadata:")] -fn the_message_contains_the_following_metadata(world: &mut V3MessageWorld, step: &Step) { - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap().iter() - .enumerate() - .map(|(index, h)| (h.clone(), index)) - .collect::>(); - for values in table.rows.iter().skip(1) { - let key = values.get(*headers.get("key").unwrap()).unwrap(); - let value = values.get(*headers.get("value").unwrap()).unwrap(); - let json: Value = if value.starts_with("JSON:") { - serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value).trim()).unwrap() - } else { - Value::String(value.clone()) - }; - world.message_builder.metadata(key, json); - } - } -} - -#[given(expr = "a provider state {string} for the message is specified")] -fn a_provider_state_for_the_message_is_specified(world: &mut V3MessageWorld, state: String) { - 
world.message_builder.given(state); -} - -#[given(expr = "a provider state {string} for the message is specified with the following data:")] -fn a_provider_state_for_the_message_is_specified_with_the_following_data( - world: &mut V3MessageWorld, - step: &Step, - state: String) { - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap().iter() - .enumerate() - .map(|(index, h)| (index, h.clone())) - .collect::>(); - let params = table.rows.get(1).unwrap().iter().enumerate().map(|(i, v)| { - let key = headers.get(&i).unwrap(); - let json: Value = serde_json::from_str(v).unwrap(); - (key.clone(), json) - }).collect(); - world.message_builder.given_with_params(state, &Value::Object(params)); - } -} - -#[when("the message is successfully processed")] -fn the_message_is_successfully_processed(world: &mut V3MessageWorld) { - world.builder.push_interaction(&world.message_builder.build_v3()); - world.received_messages = world.builder.v3_messages() - .collect(); - world.failed = None; -} - -#[then("the consumer test will have passed")] -fn consumer_test_will_have_passed(world: &mut V3MessageWorld) -> anyhow::Result<()> { - match &world.failed { - None => Ok(()), - Some(err) => Err(anyhow!(err.clone())) - } -} - -#[then(expr = "the received message payload will contain the {string} JSON document")] -fn the_received_message_payload_will_contain_the_json_document( - world: &mut V3MessageWorld, - fixture: String -) -> anyhow::Result<()> { - let mut fixture = File::open(format!("pact-compatibility-suite/fixtures/{}.json", fixture))?; - let mut buffer = Vec::new(); - fixture.read_to_end(&mut buffer)?; - let message = world.received_messages.first().unwrap(); - if message.contents.value().unwrap() == buffer.as_slice() { - Ok(()) - } else { - let body = OptionalBody::Present(Bytes::from(buffer), None, None); - Err(anyhow!("Expected payload with {} but got {}", message.contents.display_string(), - body.display_string())) - } -} - -#[then(expr = "the received message content type will be {string}")] -fn the_received_message_content_type_will_be( - world: &mut V3MessageWorld, - content_type: String -) -> anyhow::Result<()> { - let message = world.received_messages.first().unwrap(); - let ct = message.message_content_type().unwrap(); - if ct.to_string() == content_type { - Ok(()) - } else { - Err(anyhow!("Expected message with content type {} but got {:?}", content_type, ct)) - } -} - -#[then("a Pact file for the message interaction will have been written")] -fn a_pact_file_for_the_message_interaction_will_have_been_written(world: &mut V3MessageWorld) -> anyhow::Result<()> { - let dir = PathBuf::from("target/compatibility-suite/v3").join(&world.scenario_id); - let pact_file = dir.join("V3-message-consumer-V3-message-provider.json"); - if pact_file.exists() { - let pact = read_pact(&pact_file)?; - if pact.specification_version() == PactSpecification::V3 { - world.loaded_pact = pact.as_message_pact()?; - Ok(()) - } else { - Err(anyhow!("Expected Pact file to be V3 Pact, but was {}", pact.specification_version())) - } - } else { - Err(anyhow!("No pact file found: {}", pact_file.to_string_lossy())) - } -} - -#[then(expr = "the pact file will contain {int} message interaction(s)")] -fn the_pact_file_will_contain_message_interaction( - world: &mut V3MessageWorld, - messages: usize -) -> anyhow::Result<()> { - let actual = world.loaded_pact.messages.len(); - if actual == messages { - Ok(()) - } else { - Err(anyhow!("Expected {} messages in the Pact, but there were {}", messages, actual)) 
- } -} - -#[then(expr = "the {numType} message in the pact file will contain the {string} document")] -fn the_first_message_in_the_pact_file_will_contain_the_document( - world: &mut V3MessageWorld, - index: IndexType, - fixture: String -) -> anyhow::Result<()> { - let message = world.loaded_pact.messages.get(index.val()).unwrap(); - - let mut fixture_file = File::open(format!("pact-compatibility-suite/fixtures/{}", fixture))?; - let mut buffer = Vec::new(); - fixture_file.read_to_end(&mut buffer)?; - - let mut expected = Vec::new(); - if fixture.ends_with(".json") { - let json: Value = serde_json::from_slice(&buffer)?; - let string = json.to_string(); - expected.extend_from_slice(string.as_bytes()); - } else { - expected.extend_from_slice(&buffer); - } - - let actual_body = message.contents.value().unwrap_or_default(); - if &actual_body == expected.as_slice() { - Ok(()) - } else { - let body = OptionalBody::Present(Bytes::from(buffer), None, None); - Err(anyhow!("Expected Interaction {} message payload with {} but got {}", index.val() + 1, - message.contents.display_string(), body.display_string())) - } -} - -#[then(expr = "the {numType} message in the pact file content type will be {string}")] -fn the_first_message_in_the_pact_file_content_type_will_be( - world: &mut V3MessageWorld, - index: IndexType, - content_type: String -) -> anyhow::Result<()> { - let message = world.loaded_pact.messages.get(index.val()).unwrap(); - if let Some(ct) = message.message_content_type() { - if ct.to_string() == content_type { - Ok(()) - } else { - Err(anyhow!("Message {} content type {}, but expected {}", index.val() + 1, ct, content_type)) - } - } else { - Err(anyhow!("Message has no content type set")) - } -} - -#[when(expr = "the message is NOT successfully processed with a {string} exception")] -fn the_message_is_not_successfully_processed_with_a_exception( - world: &mut V3MessageWorld, - error: String -) { - world.builder.push_interaction(&world.message_builder.build()); - let result = catch_unwind(|| { - let _messages = world.builder.v3_messages(); - // This panic will cause the message iterator to not write out the Pact file when dropped - panic!("{}", error); - }); - world.failed = result.err().map(|err| { - if let Some(err) = err.downcast_ref::<&str>() { - err.to_string() - } else if let Some(err) = err.downcast_ref::() { - err.clone() - } else { - format!("Unknown error: {:?}", err) - } - }); -} - -#[then("the consumer test will have failed")] -fn the_consumer_test_will_have_failed(world: &mut V3MessageWorld) -> anyhow::Result<()> { - if world.failed.is_some() { - Ok(()) - } else { - Err(anyhow!("Expected test to fail. It did not. Very rude.")) - } -} - -#[then(expr = "the consumer test error will be {string}")] -fn the_consumer_test_error_will_be_blah( - world: &mut V3MessageWorld, - error: String -) -> anyhow::Result<()> { - if let Some(err) = &world.failed { - if *err == error { - Ok(()) - } else { - Err(anyhow!("Expected test to fail with error '{}' but the error was '{}'", error, err)) - } - } else { - Err(anyhow!("Expected test to fail with error '{}'. It did not. 
Very rude.", error)) - } -} - -#[then("a Pact file for the message interaction will NOT have been written")] -fn a_pact_file_for_the_message_interaction_will_not_have_been_written( - world: &mut V3MessageWorld -) -> anyhow::Result<()> { - let dir = PathBuf::from("target/compatibility-suite/v3").join(&world.scenario_id); - let pact_file = dir.join("V3-message-consumer-V3-message-provider.json"); - if pact_file.exists() { - Err(anyhow!("Expected no pact file, but found: {}", pact_file.to_string_lossy())) - } else { - Ok(()) - } -} - -#[then(expr = "the received message metadata will contain {string} == {string}")] -fn the_received_message_metadata_will_contain( - world: &mut V3MessageWorld, - key: String, - value: String -) -> anyhow::Result<()> { - let json: Value = if value.starts_with("JSON:") { - let value_str = value.strip_prefix("JSON:") - .unwrap_or(value.as_str()) - .trim() - .replace("\\\"", "\""); - serde_json::from_str(value_str.as_str()).unwrap() - } else { - Value::String(value.clone()) - }; - if let Some(md_value) = world.received_messages.first().unwrap().metadata.get(&key) { - if *md_value == json { - Ok(()) - } else { - Err(anyhow!("Expected message metadata with key {} == {} but was {}", key, json, md_value)) - } - } else { - Err(anyhow!("Received message did not have a metadata value with key {}", key)) - } -} - -#[then(expr = "the {numType} message in the pact file will contain the message metadata {string} == {string}")] -fn the_first_message_in_the_pact_file_will_contain_the_message_metadata( - world: &mut V3MessageWorld, - index: IndexType, - key: String, - value: String -) -> anyhow::Result<()> { - let message = world.loaded_pact.messages.get(index.val()).unwrap(); - let json: Value = if value.starts_with("JSON:") { - let value_str = value.strip_prefix("JSON:") - .unwrap_or(value.as_str()) - .trim() - .replace("\\\"", "\""); - serde_json::from_str(value_str.as_str()).unwrap() - } else { - Value::String(value.clone()) - }; - if let Some(md_value) = message.metadata.get(&key) { - if *md_value == json { - Ok(()) - } else { - Err(anyhow!("Expected message metadata with key {} == {} but was {}", key, json, md_value)) - } - } else { - Err(anyhow!("Received message did not have a metadata value with key {}", key)) - } -} - -#[then(expr = "the {numType} message in the pact file will contain {int} provider state(s)")] -fn the_first_message_in_the_pact_file_will_contain_provider_states( - world: &mut V3MessageWorld, - index: IndexType, - states: usize -) -> anyhow::Result<()> { - let message = world.loaded_pact.messages.get(index.val()).unwrap(); - let actual = message.provider_states.len(); - if actual == states { - Ok(()) - } else { - Err(anyhow!("Expected message to have {} provider states, but it has {}", states, actual)) - } -} - -#[then(expr = "the {numType} message in the Pact file will contain provider state {string}")] -fn the_first_message_in_the_pact_file_will_contain_provider_state( - world: &mut V3MessageWorld, - index: IndexType, - state_name: String -) -> anyhow::Result<()> { - let message = world.loaded_pact.messages.get(index.val()).unwrap(); - if message.provider_states.iter().any(|ps| ps.name == state_name) { - Ok(()) - } else { - Err(anyhow!("Did not find a provider state '{}'", state_name)) - } -} - -#[then(expr = "the provider state {string} for the message will contain the following parameters:")] -fn the_provider_state_for_the_message_will_contain_the_following_parameters( - world: &mut V3MessageWorld, - step: &Step, - state: String -) -> 
anyhow::Result<()> { - let table = step.table.as_ref().unwrap(); - let params_str = table.rows.get(1).unwrap().first().unwrap(); - let params = serde_json::from_str::(params_str.as_str()) - .unwrap() - .as_object() - .unwrap() - .iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); - let message = world.loaded_pact.messages.first().unwrap(); - let provider_state = message.provider_states.iter().find(|ps| ps.name == state).unwrap(); - if provider_state.params == params { - Ok(()) - } else { - Err(anyhow!("Expected provider state '{}' to have parameters {:?} but were {:?}", state, - params, provider_state.params)) - } -} - -#[then(expr = "the message contents for {string} will have been replaced with a(n) {string}")] -fn the_message_contents_for_will_have_been_replaced_with_an( - world: &mut V3MessageWorld, - path: String, - value_type: String -) -> anyhow::Result<()> { - let message_pact = world.builder.build().as_message_pact().unwrap(); - let message = message_pact.messages.first().unwrap(); - let path = DocPath::new(path).unwrap(); - let original_json: Value = serde_json::from_str(message.contents.value_as_string().unwrap().as_str()).unwrap(); - let pointer = path.as_json_pointer().unwrap(); - let pointer = pointer.as_str(); - let original_element = original_json.pointer(pointer).unwrap(); - let json: Value = serde_json::from_str(world.received_messages.first().unwrap().contents.value_as_string().unwrap().as_str()).unwrap(); - let element = json.pointer(pointer).unwrap(); - - if element == original_element { - return Err(anyhow!("Expected original ({:?}) to have been replaced", original_element)) - } - - assert_value_type(value_type, element) -} - -#[then(expr = "the received message metadata will contain {string} replaced with a(n) {string}")] -fn the_received_message_metadata_will_contain_replaced_with_an( - world: &mut V3MessageWorld, - key: String, - value_type: String -) -> anyhow::Result<()> { - let message_pact = world.builder.build().as_message_pact().unwrap(); - let message = message_pact.messages.first().unwrap(); - let original = message.metadata.get(&key).unwrap(); - let generated = world.received_messages.first().unwrap().metadata.get(&key).unwrap(); - - if generated == original { - return Err(anyhow!("Expected original ({:?}) to have been replaced", original)) - } - - assert_value_type(value_type, generated) -} - -// TODO: Message in pact-models needs to implement add_header correctly, then this can be replaced -// with the version from shared steps. 
-pub fn setup_body(body: &String, httppart: &mut Message) { - if !body.is_empty() { - if body.starts_with("JSON:") { - httppart.metadata.insert("contentType".to_string(), json!("application/json")); - httppart.contents = OptionalBody::Present(Bytes::from(body.strip_prefix("JSON:").unwrap_or(body).trim().to_string()), - Some(JSON.clone()), None); - } else if body.starts_with("XML:") { - httppart.metadata.insert("contentType".to_string(), json!("application/xml")); - httppart.contents = OptionalBody::Present(Bytes::from(body.strip_prefix("XML:").unwrap_or(body).trim().to_string()), - Some(XML.clone()), None); - } else if body.starts_with("file:") { - if body.ends_with("-body.xml") { - let file_name = body.strip_prefix("file:").unwrap_or(body).trim(); - let mut f = File::open(format!("pact-compatibility-suite/fixtures/{}", file_name)) - .expect(format!("could not load fixture '{}'", body).as_str()); - let mut buffer = Vec::new(); - f.read_to_end(&mut buffer) - .expect(format!("could not read fixture '{}'", body).as_str()); - let fixture = parse_bytes(buffer.as_slice()) - .expect(format!("could not parse fixture as XML: '{}'", body).as_str()); - let root = fixture.as_document().root(); - let body_node = root.children().iter().find_map(|n| n.element()).unwrap(); - let content_type = element_text(body_node, "contentType").unwrap_or("text/plain".to_string()); - httppart.metadata.insert("contentType".to_string(), json!(content_type)); - httppart.contents = OptionalBody::Present(Bytes::from(element_text(body_node, "contents").unwrap_or_default()), - ContentType::parse(content_type.as_str()).ok(), None); - } else { - let content_type = determine_content_type(body, httppart); - httppart.metadata.insert("contentType".to_string(), json!(content_type.to_string())); - - let file_name = body.strip_prefix("file:").unwrap_or(body).trim(); - let mut f = File::open(format!("pact-compatibility-suite/fixtures/{}", file_name)) - .expect(format!("could not load fixture '{}'", body).as_str()); - let mut buffer = Vec::new(); - f.read_to_end(&mut buffer) - .expect(format!("could not read fixture '{}'", body).as_str()); - httppart.contents = OptionalBody::Present(Bytes::from(buffer), - Some(content_type), None); - } - } else { - let content_type = determine_content_type(body, httppart); - httppart.metadata.insert("contentType".to_string(), json!(content_type.to_string())); - let body = Bytes::from(body.clone()); - httppart.contents = OptionalBody::Present(body, Some(content_type), None); - } - } -} - -// ---------------------------------------------------------------------- -// Provider steps -// ---------------------------------------------------------------------- - -#[given(expr = "a provider is started that can generate the {string} message with {string}")] -#[allow(deprecated)] -fn a_provider_is_started_that_can_generate_the_message( - world: &mut V3MessageWorld, - name: String, - fixture: String -) { - let key = format!("{}:{}", world.scenario_id, name); - let mut message = Message { - description: key.clone(), - .. Message::default() - }; - setup_body(&fixture, &mut message); - - { - let mut guard = MESSAGES.lock().unwrap(); - guard.insert(key, message); - } - - world.provider_info = ProviderInfo { - name: "p".to_string(), - host: "localhost".to_string(), - port: Some(world.message_proxy_port), - transports: vec![ProviderTransport { - port: Some(world.message_proxy_port), - .. ProviderTransport::default() - }], - .. 
ProviderInfo::default() - }; -} - -#[given(expr = "a Pact file for {string}:{string} is to be verified")] -fn a_pact_file_for_is_to_be_verified( - world: &mut V3MessageWorld, - name: String, - fixture: String -) { - let key = format!("{}:{}", world.scenario_id, name); - let mut message = Message { - description: key.clone(), - .. Message::default() - }; - setup_body(&fixture, &mut message); - - let pact = MessagePact { - consumer: Consumer { name: "c".to_string() }, - provider: Provider { name: "p".to_string() }, - messages: vec![ message ], - specification_version: PactSpecification::V3, - .. MessagePact::default() - }; - world.sources.push(PactSource::String(pact.to_json(PactSpecification::V3).unwrap().to_string())); -} - -#[given(expr = "a Pact file for {string}:{string} is to be verified with provider state {string}")] -fn a_pact_file_for_is_to_be_verified_with_provider_state( - world: &mut V3MessageWorld, - name: String, - fixture: String, - state: String -) { - let key = format!("{}:{}", world.scenario_id, name); - let mut message = Message { - description: key.clone(), - provider_states: vec![ ProviderState::default(state) ], - .. Message::default() - }; - setup_body(&fixture, &mut message); - - let pact = MessagePact { - consumer: Consumer { name: "c".to_string() }, - provider: Provider { name: "p".to_string() }, - messages: vec![ message ], - specification_version: PactSpecification::V3, - .. MessagePact::default() - }; - world.sources.push(PactSource::String(pact.to_json(PactSpecification::V3).unwrap().to_string())); -} - -#[given(expr = "a provider is started that can generate the {string} message with {string} and the following metadata:")] -#[allow(deprecated)] -fn a_provider_is_started_that_can_generate_the_message_with_the_following_metadata( - world: &mut V3MessageWorld, - step: &Step, - name: String, - fixture: String -) { - let key = format!("{}:{}", world.scenario_id, name); - let mut message = Message { - description: key.clone(), - .. Message::default() - }; - setup_body(&fixture, &mut message); - - if let Some(table) = &step.table { - for row in table.rows.iter().skip(1) { - let key = row[0].clone(); - let value = row[1].clone(); - if value.starts_with("JSON:") { - let json = serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value.as_str()).trim()).unwrap(); - message.metadata.insert(key, json); - } else { - message.metadata.insert(key, Value::String(value)); - }; - } - } - - { - let mut guard = MESSAGES.lock().unwrap(); - guard.insert(key, message); - } - - world.provider_info = ProviderInfo { - name: "p".to_string(), - host: "localhost".to_string(), - port: Some(world.message_proxy_port), - transports: vec![ProviderTransport { - port: Some(world.message_proxy_port), - .. ProviderTransport::default() - }], - .. ProviderInfo::default() - }; -} - -#[given(expr = "a Pact file for {string}:{string} is to be verified with the following metadata:")] -fn a_pact_file_for_is_to_be_verified_with_the_following_metadata( - world: &mut V3MessageWorld, - step: &Step, - name: String, - fixture: String -) { - let key = format!("{}:{}", world.scenario_id, name); - let mut message = Message { - description: key.clone(), - .. 
Message::default() - }; - setup_body(&fixture, &mut message); - - if let Some(table) = &step.table { - for row in &table.rows { - let key = row[0].clone(); - let value = row[1].clone(); - if value.starts_with("JSON:") { - let json = serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value.as_str()).trim()).unwrap(); - message.metadata.insert(key, json); - } else { - message.metadata.insert(key, Value::String(value)); - }; - } - } - - let pact = MessagePact { - consumer: Consumer { name: "c".to_string() }, - provider: Provider { name: "p".to_string() }, - messages: vec![ message ], - specification_version: PactSpecification::V3, - .. MessagePact::default() - }; - world.sources.push(PactSource::String(pact.to_json(PactSpecification::V3).unwrap().to_string())); -} - -#[given(expr = "a Pact file for {string} is to be verified with the following:")] -fn a_pact_file_for_is_to_be_verified_with_the_following( - world: &mut V3MessageWorld, - step: &Step, - name: String -) { - let key = format!("{}:{}", world.scenario_id, name); - let mut message = Message { - description: key.clone(), - .. Message::default() - }; - - if let Some(table) = &step.table { - for row in &table.rows { - match row[0].as_str() { - "body" => { - setup_body(&row[1], &mut message); - } - "matching rules" => { - let value = dbg!(&row[1]); - let json: Value = if value.starts_with("JSON:") { - serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value).trim()).unwrap() - } else { - let f = File::open(format!("pact-compatibility-suite/fixtures/{}", value)) - .expect(format!("could not load fixture '{}'", value).as_str()); - let reader = BufReader::new(f); - serde_json::from_reader(reader).unwrap() - }; - message.matching_rules = matchers_from_json(&json!({"matchingRules": json}), &None).unwrap(); - } - "metadata" => { - for values in row[1].split(';').map(|v| v.trim().splitn(2, '=').collect_vec()) { - let key = values[0]; - let value = values[1]; - if value.starts_with("JSON:") { - let json = serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value).trim()).unwrap(); - message.metadata.insert(key.to_string(), json); - } else { - message.metadata.insert(key.to_string(), Value::String(value.to_string())); - }; - } - } - _ => {} - } - } - } - - let pact = MessagePact { - consumer: Consumer { name: "c".to_string() }, - provider: Provider { name: "p".to_string() }, - messages: vec![ message ], - specification_version: PactSpecification::V3, - .. 
MessagePact::default() - }; - world.sources.push(PactSource::String(pact.to_json(PactSpecification::V3).unwrap().to_string())); -} - -#[given("a provider state callback is configured")] -fn a_provider_state_callback_is_configured(world: &mut V3MessageWorld) -> anyhow::Result<()> { - world.provider_state_executor.set_fail_mode(false); - Ok(()) -} - -#[when("the verification is run")] -async fn the_verification_is_run(world: &mut V3MessageWorld) -> anyhow::Result<()> { - world.verification_results = verify_provider_async( - world.provider_info.clone(), - world.sources.clone(), - FilterInfo::None, - vec![], - &VerificationOptions::::default(), - None, - &world.provider_state_executor, - None - ).await?; - Ok(()) -} - -#[then("the verification will be successful")] -fn the_verification_will_be_successful(world: &mut V3MessageWorld) -> anyhow::Result<()> { - if world.verification_results.result { - Ok(()) - } else { - Err(anyhow!("Verification failed")) - } -} - -#[then("the verification will NOT be successful")] -fn the_verification_will_not_be_successful(world: &mut V3MessageWorld) -> anyhow::Result<()> { - if world.verification_results.result { - Err(anyhow!("Was expecting the verification to fail")) - } else { - Ok(()) - } -} - -#[then("the provider state callback will be called before the verification is run")] -fn the_provider_state_callback_will_be_called_before_the_verification_is_run(world: &mut V3MessageWorld) -> anyhow::Result<()> { - if world.provider_state_executor.was_called(true) { - Ok(()) - } else { - Err(anyhow!("Provider state callback was not called")) - } -} - -#[then("the provider state callback will be called after the verification is run")] -fn the_provider_state_callback_will_be_called_after_the_verification_is_run(world: &mut V3MessageWorld) -> anyhow::Result<()> { - if world.provider_state_executor.was_called(false) { - Ok(()) - } else { - Err(anyhow!("Provider state callback teardown was not called")) - } -} - -#[then(expr = "the provider state callback will receive a setup call with {string} as the provider state parameter")] -fn the_provider_state_callback_will_receive_a_setup_call_with_as_the_provider_state_parameter( - world: &mut V3MessageWorld, - state: String -) -> anyhow::Result<()> { - if world.provider_state_executor.was_called_for_state(state.as_str(), true) { - Ok(()) - } else { - Err(anyhow!("Provider state callback was not called for state '{}'", state)) - } -} - -#[then(expr = "the provider state callback will receive a teardown call {string} as the provider state parameter")] -fn the_provider_state_callback_will_receive_a_teardown_call_as_the_provider_state_parameter( - world: &mut V3MessageWorld, - state: String -) -> anyhow::Result<()> { - if world.provider_state_executor.was_called_for_state(state.as_str(), false) { - Ok(()) - } else { - Err(anyhow!("Provider state teardown callback was not called for state '{}'", state)) - } -} - -#[then(expr = "the verification results will contain a {string} error")] -fn the_verification_results_will_contain_a_error(world: &mut V3MessageWorld, err: String) -> anyhow::Result<()> { - if world.verification_results.errors.iter().any(|(_, r)| { - match r { - VerificationMismatchResult::Mismatches { mismatches, .. } => { - mismatches.iter().any(|mismatch| { - match mismatch { - Mismatch::MethodMismatch { .. } => false, - Mismatch::PathMismatch { .. } => false, - Mismatch::StatusMismatch { .. } => err == "Response status did not match", - Mismatch::QueryMismatch { .. } => false, - Mismatch::HeaderMismatch { .. 
} => err == "Headers had differences", - Mismatch::BodyTypeMismatch { .. } => false, - Mismatch::BodyMismatch { .. } => err == "Body had differences", - Mismatch::MetadataMismatch { .. } => err == "Metadata had differences" - } - }) - } - VerificationMismatchResult::Error { error, .. } => match err.as_str() { - "State change request failed" => error == "One or more of the setup state change handlers has failed", - _ => error.as_str() == err - } - } - }) { - Ok(()) - } else { - Err(anyhow!("Did not find error message in verification results")) - } -} diff --git a/compatibility-suite/tests/v3_steps/mod.rs b/compatibility-suite/tests/v3_steps/mod.rs deleted file mode 100644 index 733738af4..000000000 --- a/compatibility-suite/tests/v3_steps/mod.rs +++ /dev/null @@ -1,60 +0,0 @@ -use std::collections::HashMap; -use std::panic::RefUnwindSafe; - -use cucumber::World; -use pact_models::bodies::OptionalBody; -use pact_models::generators::GeneratorTestMode; -use pact_models::pact::Pact; -use pact_models::prelude::RequestResponsePact; -use pact_models::request::Request; -use pact_models::response::Response; -use serde_json::Value; - -use pact_consumer::builders::{InteractionBuilder, PactBuilder}; -use pact_matching::RequestMatchResult; - -mod http_consumer; -mod http_matching; -mod generators; -pub mod message; - -#[derive(Debug, World)] -pub struct V3World { - pub builder: PactBuilder, - pub integration_builder: InteractionBuilder, - pub pact: Box<dyn Pact>, - pub pact_json: Value, - pub expected_request: Request, - pub received_requests: Vec<Request>, - pub match_result: Vec<RequestMatchResult>, - pub request: Request, - pub response: Response, - pub generated_request: Request, - pub generated_response: Response, - pub generator_test_mode: GeneratorTestMode, - pub generator_context: HashMap<String, Value>, - pub original_body: OptionalBody, - pub generated_body: OptionalBody -} - -impl Default for V3World { - fn default() -> Self { - V3World { - builder: PactBuilder::new("C", "P"), - integration_builder: InteractionBuilder::new("I", ""), - pact: Box::new(RequestResponsePact::default()), - pact_json: Default::default(), - expected_request: Default::default(), - received_requests: vec![], - match_result: vec![], - request: Default::default(), - response: Default::default(), - generated_request: Default::default(), - generated_response: Default::default(), - generator_test_mode: GeneratorTestMode::Consumer, - generator_context: Default::default(), - original_body: Default::default(), - generated_body: Default::default() - } - } -} diff --git a/compatibility-suite/tests/v4.rs b/compatibility-suite/tests/v4.rs deleted file mode 100644 index 6612fbf24..000000000 --- a/compatibility-suite/tests/v4.rs +++ /dev/null @@ -1,89 +0,0 @@ -use base64::Engine; -use base64::engine::general_purpose::STANDARD as BASE64; -use cucumber::World; -use itertools::Itertools; -use rocket::http::{ContentType, Header}; -use rocket::Responder; -use rocket::serde::json::Json; -use serde::Deserialize; -use serde_json::json; -use tracing::debug; -use tracing_subscriber::EnvFilter; - -use crate::v4_steps::V4World; - -mod shared_steps; -mod v4_steps; - -#[derive(Deserialize, Default, Debug)] -struct MessageDetails { - description: String -} - -#[derive(Responder)] -struct MessageResponder<'a> { - payload: Option<Vec<u8>>, - content_type: ContentType, - metadata: Header<'a> -} - -#[rocket::post("/", data = "<request>")] -async fn messages(request: Json<MessageDetails>) -> Option<MessageResponder<'static>> { - let details = request.into_inner(); - debug!("Got request = {:?}", details); - let guard = 
-  guard.get(details.description.as_str())
-    .map(|message| {
-      let metadata = json!(message.contents.metadata).to_string();
-      MessageResponder {
-        payload: message.contents.contents.value().map(|data| data.to_vec()),
-        content_type: message.message_content_type()
-          .map(|ct| ContentType::parse_flexible(ct.to_string().as_str()))
-          .flatten()
-          .unwrap_or(ContentType::Plain),
-        metadata: Header::new("pact-message-metadata", BASE64.encode(metadata))
-      }
-    })
-}
-
-#[tokio::main]
-async fn main() {
-  let format = tracing_subscriber::fmt::format().pretty();
-  tracing_subscriber::fmt()
-    .with_env_filter(EnvFilter::from_default_env())
-    .event_format(format)
-    .init();
-
-  let server = rocket::build()
-    .mount("/", rocket::routes![messages])
-    .ignite()
-    .await.expect("Could not start the Rocket server");
-  let shutdown = server.shutdown();
-  let port = server.config().port;
-  tokio::spawn(server.launch());
-
-  V4World::cucumber()
-    .fail_on_skipped()
-    .before(move |_, _, scenario, world| Box::pin(async move {
-      world.scenario_id = scenario.name.clone();
-      world.message_proxy_port = port;
-    }))
-    .after(|_feature, _, _scenario, _status, world| Box::pin(async move {
-      if let Some(world) = world {
-        let mut ms = world.provider_server.lock().unwrap();
-        let _ = ms.shutdown();
-
-        let mut guard = v4_steps::message_provider::MESSAGES.lock().unwrap();
-        let keys = guard.keys().cloned().collect_vec();
-        for key in keys {
-          if key.starts_with(world.scenario_id.as_str()) {
-            guard.remove(key.as_str());
-          }
-        }
-      }
-    }))
-    .run_and_exit("pact-compatibility-suite/features/V4")
-    .await;
-
-  shutdown.notify();
-}
diff --git a/compatibility-suite/tests/v4_steps/generators.rs b/compatibility-suite/tests/v4_steps/generators.rs
deleted file mode 100644
index 105a7193b..000000000
--- a/compatibility-suite/tests/v4_steps/generators.rs
+++ /dev/null
@@ -1,155 +0,0 @@
-use std::collections::HashMap;
-use std::fs::File;
-use std::io::BufReader;
-
-use anyhow::anyhow;
-use cucumber::{given, then, when};
-use cucumber::gherkin::Step;
-use maplit::hashmap;
-use pact_models::generators::{Generators, GeneratorTestMode};
-use pact_models::json_utils::json_to_string;
-use pact_models::path_exp::DocPath;
-use pact_models::v4::http_parts::HttpRequest;
-use serde_json::Value;
-
-use pact_matching::generate_request;
-
-use crate::shared_steps::{assert_value_type, setup_body};
-use crate::v4_steps::V4World;
-
-#[given(expr = "a request configured with the following generators:")]
-fn a_request_configured_with_the_following_generators(world: &mut V4World, step: &Step) {
-  let mut request = HttpRequest {
-    path: "/path/one".to_string(),
-    ..
HttpRequest::default() - }; - - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - match field.as_str() { - "body" => setup_body(value, &mut request, None), - "generators" => { - let json: Value = if value.starts_with("JSON:") { - serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value).trim()).unwrap() - } else { - let f = File::open(format!("pact-compatibility-suite/fixtures/{}", value)) - .expect(format!("could not load fixture '{}'", value).as_str()); - let reader = BufReader::new(f); - serde_json::from_reader(reader).unwrap() - }; - let mut generators = Generators::default(); - generators.load_from_map(json.as_object().unwrap()).unwrap(); - request.generators = generators; - } - _ => {} - } - } - } - } - - world.original_body = request.body.clone(); - world.request = request; -} - -#[given(expr = "the generator test mode is set as {string}")] -fn the_generator_test_mode_is_set_as(world: &mut V4World, mode: String) { - world.generator_test_mode = if mode == "Consumer" { - GeneratorTestMode::Consumer - } else { - GeneratorTestMode::Provider - }; -} - -#[when(expr = "the request is prepared for use with a {string} context:")] -async fn the_request_is_prepared_for_use_with_a_context( - world: &mut V4World, - step: &Step, - context_field: String -) { - let context = if let Some(table) = step.table.as_ref() { - let value = table.rows.first().unwrap().first().unwrap(); - let json: Value = serde_json::from_str(value).unwrap(); - let attributes = json.as_object().unwrap(); - let map = attributes.iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect::>(); - if context_field == "providerState" { - map - } else if context_field == "mockServer" { - hashmap!{ - context_field.to_string() => Value::Object(map.iter() - .map(|(k, v)| { - if k == "href" { - ("url".to_string(), v.clone()) - } else { - (k.clone(), v.clone()) - } - }).collect()) - } - } else { - hashmap!{ - context_field.to_string() => Value::Object(map.iter() - .map(|(k, v)| (k.clone(), v.clone())).collect()) - } - } - } else { - world.generator_context.clone() - }; - - let context = context.iter() - .map(|(k, v)| (k.as_str(), v.clone())) - .collect::>(); - world.generated_request = generate_request(&world.request, &world.generator_test_mode, &context).await; - world.generated_body = world.generated_request.body.clone(); -} - -#[when("the request is prepared for use")] -async fn the_request_prepared_for_use(world: &mut V4World) { - let context = world.generator_context.iter() - .map(|(k, v)| (k.as_str(), v.clone())) - .collect(); - world.generated_request = generate_request(&world.request, &world.generator_test_mode, &context).await; - world.generated_body = world.generated_request.body.clone(); -} - -#[then(expr = "the body value for {string} will have been replaced with {string}")] -fn the_body_value_for_will_have_been_replaced_with_value( - world: &mut V4World, - path: String, - value: String -) -> anyhow::Result<()> { - let path = DocPath::new(path).unwrap(); - let original_json: Value = serde_json::from_str(world.original_body.value_as_string().unwrap().as_str()).unwrap(); - let original_element = original_json.pointer(path.as_json_pointer().unwrap().as_str()).unwrap(); - let json: Value = serde_json::from_str(world.generated_body.value_as_string().unwrap().as_str()).unwrap(); - let element = json.pointer(path.as_json_pointer().unwrap().as_str()).unwrap(); - - if 
element == original_element { - Err(anyhow!("Expected original ({:?}) to have been replaced", original_element)) - } else if json_to_string(&element) == value { - Ok(()) - } else { - Err(anyhow!("Expected value ({:?}) to be equal to {}", element, value)) - } -} - -#[then(expr = "the body value for {string} will have been replaced with a(n) {string}")] -fn the_body_value_for_will_have_been_replaced_with_a_value( - world: &mut V4World, - path: String, - value_type: String -) -> anyhow::Result<()> { - let path = DocPath::new(path).unwrap(); - let original_json: Value = serde_json::from_str(world.original_body.value_as_string().unwrap().as_str()).unwrap(); - let original_element = original_json.pointer(path.as_json_pointer().unwrap().as_str()).unwrap(); - let json: Value = serde_json::from_str(world.generated_body.value_as_string().unwrap().as_str()).unwrap(); - let element = json.pointer(path.as_json_pointer().unwrap().as_str()).unwrap(); - - if element == original_element { - return Err(anyhow!("Expected original ({:?}) to have been replaced", original_element)) - } - - assert_value_type(value_type, element) -} diff --git a/compatibility-suite/tests/v4_steps/http_consumer.rs b/compatibility-suite/tests/v4_steps/http_consumer.rs deleted file mode 100644 index e3eb15aeb..000000000 --- a/compatibility-suite/tests/v4_steps/http_consumer.rs +++ /dev/null @@ -1,29 +0,0 @@ -use cucumber::given; - -use pact_consumer::builders::{InteractionBuilder, PactBuilder}; - -use crate::v4_steps::V4World; - -#[given("an HTTP interaction is being defined for a consumer test")] -fn an_http_integration_is_being_defined_for_a_consumer_test(world: &mut V4World) { - world.builder = PactBuilder::new_v4("V4 consumer", "V4 provider"); - world.integration_builder = Some(InteractionBuilder::new("interaction for a consumer test", "")); -} - -#[given(expr = "a key of {string} is specified for the HTTP interaction")] -fn a_key_of_is_specified(world: &mut V4World, key: String) { - let builder = world.integration_builder.as_mut().unwrap(); - builder.with_key(key); -} - -#[given("the HTTP interaction is marked as pending")] -fn the_interaction_is_marked_as_pending(world: &mut V4World) { - let builder = world.integration_builder.as_mut().unwrap(); - builder.pending(true); -} - -#[given(expr = "a comment {string} is added to the HTTP interaction")] -fn a_comment_is_added(world: &mut V4World, value: String) { - let builder = world.integration_builder.as_mut().unwrap(); - builder.comment(value); -} diff --git a/compatibility-suite/tests/v4_steps/http_matching.rs b/compatibility-suite/tests/v4_steps/http_matching.rs deleted file mode 100644 index b3ea52d7e..000000000 --- a/compatibility-suite/tests/v4_steps/http_matching.rs +++ /dev/null @@ -1,220 +0,0 @@ -use std::fs::File; -use std::io::BufReader; -use anyhow::anyhow; -use cucumber::gherkin::Step; -use cucumber::{given, then, when}; -use maplit::hashmap; -use pact_models::interaction::Interaction; -use pact_models::matchingrules::matchers_from_json; -use pact_models::pact::Pact; -use pact_models::v4::http_parts::{HttpRequest, HttpResponse}; -use pact_models::v4::synch_http::SynchronousHttp; -use serde_json::{json, Value}; -use pact_matching::{match_request, match_response, Mismatch}; -use crate::shared_steps::setup_body; - -use crate::v4_steps::V4World; - -#[given("an expected response configured with the following:")] -fn an_expected_response_configured_with_the_following(world: &mut V4World, step: &Step) { - let mut expected_response = HttpResponse::default(); - - if let 
Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - match field.as_str() { - "status" => expected_response.status = value.parse().unwrap(), - "body" => setup_body(value, &mut expected_response, None), - "matching rules" => { - let json: Value = if value.starts_with("JSON:") { - serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value).trim()).unwrap() - } else { - let f = File::open(format!("pact-compatibility-suite/fixtures/{}", value)) - .expect(format!("could not load fixture '{}'", value).as_str()); - let reader = BufReader::new(f); - serde_json::from_reader(reader).unwrap() - }; - expected_response.matching_rules = matchers_from_json(&json!({ - "matchingRules": json - }), &None) - .expect("Matching rules fixture is not valid JSON"); - } - _ => {} - } - } - } - } - - world.expected_response = expected_response; -} - -#[given(expr = "a status {int} response is received")] -fn a_status_response_is_received(world: &mut V4World, status: u16) { - world.received_responses.push(HttpResponse { - status, - .. HttpResponse::default() - }); -} - -#[when("the response is compared to the expected one")] -async fn the_response_is_compared_to_the_expected_one(world: &mut V4World) { - world.response_results.extend(match_response(world.expected_response.clone(), - world.received_responses.first().unwrap().clone(), &world.pact.boxed(), &SynchronousHttp::default().boxed()) - .await - ) -} - -#[then("the response comparison should be OK")] -fn the_response_comparison_should_be_ok(world: &mut V4World) -> anyhow::Result<()> { - if world.response_results.is_empty() { - Ok(()) - } else { - Err(anyhow!("Comparison resulted in {} mismatches", world.response_results.len())) - } -} - -#[then("the response comparison should NOT be OK")] -fn the_response_comparison_should_not_be_ok(world: &mut V4World) -> anyhow::Result<()> { - if !world.response_results.is_empty() { - Ok(()) - } else { - Err(anyhow!("Comparison resulted in no mismatches")) - } -} - -#[then(expr = "the response mismatches will contain a {string} mismatch with error {string}")] -fn the_response_mismatches_will_contain_a_mismatch_with_error( - world: &mut V4World, - mismatch_type: String, - error: String -) -> anyhow::Result<()> { - if world.response_results.iter().any(|m| { - let correct_type = match m { - Mismatch::BodyTypeMismatch { .. } => mismatch_type == "body-content-type", - Mismatch::StatusMismatch { .. 
} => mismatch_type == "status", - _ => m.mismatch_type().to_lowercase().starts_with(mismatch_type.as_str()) - }; - correct_type && m.description() == error - }) { - Ok(()) - } else { - Err(anyhow!("Did not find a {} error with message '{}'", mismatch_type, error)) - } -} - -#[given(expr = "an expected request configured with the following:")] -fn an_expected_request_configured_with_the_following(world: &mut V4World, step: &Step) { - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - let mut data = hashmap!{}; - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - data.insert(field.as_str(), value); - } - } - - if let Some(body) = data.get("body") { - setup_body(body, &mut world.expected_request, data.get("content type").map(|ct| ct.as_str())); - } - - if let Some(value) = data.get("matching rules") { - let json: Value = if value.starts_with("JSON:") { - serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value).trim()).unwrap() - } else { - let f = File::open(format!("pact-compatibility-suite/fixtures/{}", value)) - .expect(format!("could not load fixture '{}'", value).as_str()); - let reader = BufReader::new(f); - serde_json::from_reader(reader).unwrap() - }; - world.expected_request.matching_rules = matchers_from_json(&json!({ - "matchingRules": json - }), &None) - .expect("Matching rules fixture is not valid JSON"); - } - } -} - -#[given(expr = "a request is received with the following:")] -fn a_request_is_received_with_the_following(world: &mut V4World, step: &Step) { - let mut request = HttpRequest::default(); - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - let mut data = hashmap!{}; - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - data.insert(field.as_str(), value); - } - } - - if let Some(body) = data.get("body") { - setup_body(body, &mut request, data.get("content type").map(|ct| ct.as_str())); - } - } - world.received_requests.push(request); -} - -#[when("the request is compared to the expected one")] -async fn the_request_is_compared_to_the_expected_one(world: &mut V4World) { - world.request_results.push( - match_request( - world.expected_request.clone(), - world.received_requests.first().unwrap().clone(), - &world.pact.boxed(), &SynchronousHttp::default().boxed() - ).await - ); -} - -#[then("the comparison should be OK")] -fn the_comparison_should_be_ok(world: &mut V4World) -> anyhow::Result<()> { - if world.request_results.iter().all(|result| result.all_matched()) { - Ok(()) - } else { - let count = world.request_results.iter() - .filter_map(|res| { - let mismatches = res.mismatches(); - if mismatches.is_empty() { - None - } else { - Some(mismatches) - } - }) - .flatten() - .collect::>(); - Err(anyhow!("There were match results with mismatches ({:?})", count)) - } -} - -#[then("the comparison should NOT be OK")] -fn the_comparison_should_not_be_ok(world: &mut V4World) -> anyhow::Result<()> { - if world.request_results.iter().all(|result| result.all_matched()) { - Err(anyhow!("All requests matched")) - } else { - Ok(()) - } -} - -#[then(expr = "the mismatches will contain a mismatch with error {string} -> {string}")] -fn the_mismatches_will_contain_a_mismatch_with_error( - world: &mut V4World, - error_path: String, - error: String -) -> anyhow::Result<()> { - if world.request_results.iter().flat_map(|result| result.mismatches()) - .any(|mismatch| { - let 
path_matches = match &mismatch { - Mismatch::QueryMismatch { parameter, .. } => parameter.as_str() == error_path, - Mismatch::HeaderMismatch { key, .. } => key.as_str() == error_path, - Mismatch::BodyMismatch { path, .. } => path.as_str() == error_path, - Mismatch::MetadataMismatch { key, .. } => key.as_str() == error_path, - _ => false - }; - let error = error.replace("\\\"", "\""); - let desc_matches = mismatch.description().contains(error.as_str()); - path_matches && desc_matches - }) { - Ok(()) - } else { - Err(anyhow!("Did not find a mismatch with the required error message")) - } -} diff --git a/compatibility-suite/tests/v4_steps/http_provider.rs b/compatibility-suite/tests/v4_steps/http_provider.rs deleted file mode 100644 index 2241a857b..000000000 --- a/compatibility-suite/tests/v4_steps/http_provider.rs +++ /dev/null @@ -1,309 +0,0 @@ -use std::collections::hash_map::Entry; -use std::collections::HashMap; -use std::sync::Arc; - -use anyhow::anyhow; -use async_trait::async_trait; -use cucumber::{given, then, when}; -use cucumber::gherkin::Step; -use maplit::hashmap; -use pact_models::{Consumer, PactSpecification, Provider}; -use pact_models::headers::parse_header; -use pact_models::http_parts::HttpPart; -use pact_models::interaction::Interaction; -use pact_models::pact::Pact; -use pact_models::prelude::ProviderState; -use pact_models::prelude::v4::V4Pact; -use pact_models::v4::interaction::V4Interaction; -use reqwest::Client; -use serde_json::{json, Value}; -use uuid::Uuid; -use pact_matching::Mismatch; - -use pact_mock_server::mock_server::{MockServer, MockServerConfig}; -use pact_verifier::{ - FilterInfo, - PactSource, - ProviderInfo, - ProviderTransport, - VerificationOptions, - verify_provider_async -}; -use pact_verifier::callback_executors::ProviderStateExecutor; -use pact_verifier::verification_result::VerificationMismatchResult; - -use crate::shared_steps::{setup_body, setup_common_interactions}; -use crate::shared_steps::provider::ProviderWorldRequestFilter; -use crate::v4_steps::V4World; - -#[given("the following HTTP interactions have been defined:")] -fn the_following_http_interactions_have_been_setup(world: &mut V4World, step: &Step) { - if let Some(table) = step.table.as_ref() { - let interactions = setup_common_interactions(table); - world.interactions.extend(interactions.iter().map(|i| i.as_v4().unwrap())); - } -} - -#[given(expr = "a provider is started that returns the response from interaction {int}")] -#[allow(deprecated)] -async fn a_provider_is_started_that_returns_the_response_from_interaction(world: &mut V4World, num: usize) -> anyhow::Result<()> { - let pact = V4Pact { - consumer: Consumer { name: "v4-compatibility-suite-c".to_string() }, - provider: Provider { name: "p".to_string() }, - interactions: vec![ world.interactions.get(num - 1).unwrap().boxed_v4() ], - .. V4Pact::default() - }; - world.provider_key = Uuid::new_v4().to_string(); - let config = MockServerConfig { - pact_specification: PactSpecification::V4, - .. MockServerConfig::default() - }; - let (mock_server, future) = MockServer::new( - world.provider_key.clone(), pact.boxed(), "[::1]:0".parse()?, config - ).await.map_err(|err| anyhow!(err))?; - tokio::spawn(future); - world.provider_server = mock_server; - - let ms = world.provider_server.lock().unwrap(); - world.provider_info = ProviderInfo { - name: "p".to_string(), - host: "[::1]".to_string(), - port: ms.port, - transports: vec![ProviderTransport { - port: ms.port, - .. ProviderTransport::default() - }], - .. 
ProviderInfo::default() - }; - - Ok(()) -} - -#[given(expr = "a provider is started that returns the response from interaction {int}, with the following changes:")] -#[allow(deprecated)] -async fn a_provider_is_started_that_returns_the_response_from_interaction_with_the_following_changes( - world: &mut V4World, - step: &Step, - num: usize -) -> anyhow::Result<()> { - let mut interaction = world.interactions.get(num - 1).unwrap() - .as_v4_http().unwrap(); - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - match field.as_str() { - "status" => interaction.response.status = value.parse().unwrap(), - "headers" => { - let headers = interaction.response.headers_mut(); - let headers_to_add = value.split(",") - .map(|header| { - let key_value = header.strip_prefix("'").unwrap_or(header) - .strip_suffix("'").unwrap_or(header) - .splitn(2, ":") - .map(|v| v.trim()) - .collect::>(); - (key_value[0].to_string(), parse_header(key_value[0], key_value[1])) - }); - for (k, v) in headers_to_add { - match headers.entry(k) { - Entry::Occupied(mut entry) => { - entry.get_mut().extend_from_slice(&v); - } - Entry::Vacant(entry) => { - entry.insert(v); - } - } - } - }, - "body" => { - setup_body(value, &mut interaction.response, None); - }, - _ => {} - } - } - } - } - - let pact = V4Pact { - consumer: Consumer { name: "v1-compatibility-suite-c".to_string() }, - provider: Provider { name: "p".to_string() }, - interactions: vec![interaction.boxed_v4()], - .. V4Pact::default() - }; - world.provider_key = Uuid::new_v4().to_string(); - let config = MockServerConfig { - pact_specification: PactSpecification::V4, - .. MockServerConfig::default() - }; - let (mock_server, future) = MockServer::new( - world.provider_key.clone(), pact.boxed(), "[::1]:0".parse()?, config - ).await.map_err(|err| anyhow!(err))?; - tokio::spawn(future); - world.provider_server = mock_server; - - let ms = world.provider_server.lock().unwrap(); - world.provider_info = ProviderInfo { - name: "p".to_string(), - host: "[::1]".to_string(), - port: ms.port, - transports: vec![ProviderTransport { - port: ms.port, - .. ProviderTransport::default() - }], - .. ProviderInfo::default() - }; - - Ok(()) -} - -#[given(expr = "a Pact file for interaction {int} is to be verified, but is marked pending")] -fn a_pact_file_for_interaction_is_to_be_verified_but_is_marked_pending( - world: &mut V4World, - num: usize -) { - let mut interaction = world.interactions.get(num - 1).unwrap() - .as_v4_http().unwrap(); - interaction.pending = true; - let pact = V4Pact { - consumer: Consumer { name: format!("c_{}", num) }, - provider: Provider { name: "p".to_string() }, - interactions: vec![ interaction.boxed_v4() ], - .. 
V4Pact::default() - }; - world.sources.push(PactSource::String(pact.to_json(PactSpecification::V4).unwrap().to_string())); -} - -#[given(expr = "a Pact file for interaction {int} is to be verified with the following comments:")] -fn a_pact_file_for_interaction_is_to_be_verified_with_the_following_comments( - world: &mut V4World, - step: &Step, - num: usize -) { - let mut interaction = world.interactions.get(num - 1).unwrap() - .as_v4_http().unwrap(); - - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - for row in table.rows.iter().skip(1) { - let data: HashMap = row.iter().enumerate().map(|(i, v)| (headers[i].clone(), v.clone())).collect(); - match data["type"].as_str() { - "text" => { - match interaction.comments.entry("text".to_string()) { - Entry::Occupied(mut entry) => { - let array = entry.get_mut().as_array_mut().unwrap(); - array.push(json!(data["comment"])); - } - Entry::Vacant(entry) => { - entry.insert(json!([ data["comment"] ])); - } - } - } - "testname" => { - interaction.comments.insert("testname".to_string(), json!(data["comment"])); - }, - _ => {} - } - } - } - - let pact = V4Pact { - consumer: Consumer { name: format!("c_{}", num) }, - provider: Provider { name: "p".to_string() }, - interactions: vec![ interaction.boxed_v4() ], - .. V4Pact::default() - }; - world.sources.push(PactSource::String(pact.to_json(PactSpecification::V4).unwrap().to_string())); -} - -#[derive(Debug)] -struct DummyProviderStateExecutor; - -#[async_trait] -impl ProviderStateExecutor for DummyProviderStateExecutor { - async fn call( - self: Arc, - _interaction_id: Option, - _provider_state: &ProviderState, - _setup: bool, - _client: Option<&Client> - ) -> anyhow::Result> { - Ok(hashmap!{}) - } - - fn teardown(self: &Self) -> bool { - return false - } -} - -#[when("the verification is run")] -async fn the_verification_is_run(world: &mut V4World) -> anyhow::Result<()> { - let options = VerificationOptions::::default(); - world.verification_results = verify_provider_async( - world.provider_info.clone(), - world.sources.clone(), - FilterInfo::None, - vec![], - &options, - None, - &Arc::new(DummyProviderStateExecutor {}), - None - ).await?; - Ok(()) -} - -#[then("the verification will be successful")] -fn the_verification_will_be_successful(world: &mut V4World) -> anyhow::Result<()> { - if world.verification_results.result { - Ok(()) - } else { - Err(anyhow!("Verification failed")) - } -} - -#[then(expr = "there will be a pending {string} error")] -fn there_will_be_a_pending_error(world: &mut V4World, err: String) -> anyhow::Result<()> { - if let Some(_) = world.verification_results.pending_errors.iter().find(|(_, result)| { - match result { - VerificationMismatchResult::Mismatches { mismatches, .. } => { - mismatches.iter().any(|mismatch| { - match mismatch { - Mismatch::MethodMismatch { .. } => false, - Mismatch::PathMismatch { .. } => false, - Mismatch::StatusMismatch { .. } => err == "Response status did not match", - Mismatch::QueryMismatch { .. } => false, - Mismatch::HeaderMismatch { .. } => err == "Headers had differences", - Mismatch::BodyTypeMismatch { .. } => false, - Mismatch::BodyMismatch { .. } => err == "Body had differences", - Mismatch::MetadataMismatch { .. } => false - } - }) - } - VerificationMismatchResult::Error { error, .. 
} => err == *error - } - }) { - Ok(()) - } else { - Err(anyhow!("Did not find {} in the pending errors", err)) - } -} - -#[then(expr = "the comment {string} will have been printed to the console")] -fn the_comment_will_have_been_printed_to_the_console(world: &mut V4World, comment: String) -> anyhow::Result<()> { - let comment = comment.as_str(); - if world.verification_results.output.iter().find(|o| o.contains(comment)).is_some() { - Ok(()) - } else { - Err(anyhow!("Did not find '{}' in the output", comment)) - } -} - -#[then(expr = "the {string} will displayed as the original test name")] -fn the_will_displayed_as_the_original_test_name(world: &mut V4World, name: String) -> anyhow::Result<()> { - let comment = format!("Test Name: {}", name); - if world.verification_results.output.iter().find(|o| o.contains(comment.as_str())).is_some() { - Ok(()) - } else { - Err(anyhow!("Did not find '{}' in the output", comment)) - } -} diff --git a/compatibility-suite/tests/v4_steps/message_consumer.rs b/compatibility-suite/tests/v4_steps/message_consumer.rs deleted file mode 100644 index fd09499fc..000000000 --- a/compatibility-suite/tests/v4_steps/message_consumer.rs +++ /dev/null @@ -1,26 +0,0 @@ -use cucumber::given; -use pact_consumer::builders::MessageInteractionBuilder; -use crate::v4_steps::V4World; - -#[given("a message interaction is being defined for a consumer test")] -fn a_message_integration_is_being_defined_for_a_consumer_test(world: &mut V4World) { - world.message_builder = Some(MessageInteractionBuilder::new("a message")); -} - -#[given(expr = "a key of {string} is specified for the message interaction")] -fn message_a_key_of_is_specified(world: &mut V4World, key: String) { - let builder = world.message_builder.as_mut().unwrap(); - builder.with_key(key); -} - -#[given("the message interaction is marked as pending")] -fn the_message_interaction_is_marked_as_pending(world: &mut V4World) { - let builder = world.message_builder.as_mut().unwrap(); - builder.pending(true); -} - -#[given(expr = "a comment {string} is added to the message interaction")] -fn message_a_comment_is_added(world: &mut V4World, value: String) { - let builder = world.message_builder.as_mut().unwrap(); - builder.comment(value); -} diff --git a/compatibility-suite/tests/v4_steps/message_provider.rs b/compatibility-suite/tests/v4_steps/message_provider.rs deleted file mode 100644 index 262719690..000000000 --- a/compatibility-suite/tests/v4_steps/message_provider.rs +++ /dev/null @@ -1,179 +0,0 @@ -use std::collections::hash_map::Entry; -use std::collections::HashMap; -use std::fs::File; -use std::io::Read; -use std::sync::{Arc, Mutex}; -use bytes::Bytes; -use cucumber::gherkin::Step; - -use cucumber::given; -use lazy_static::lazy_static; -use maplit::hashmap; -use pact_models::bodies::OptionalBody; -use pact_models::{Consumer, PactSpecification, Provider}; -use pact_models::content_types::{ContentType, JSON, XML}; -use pact_models::pact::Pact; -use pact_models::v4::async_message::AsynchronousMessage; -use pact_models::v4::interaction::V4Interaction; -use pact_models::v4::message_parts::MessageContents; -use pact_models::v4::pact::V4Pact; -use pact_models::xml_utils::parse_bytes; -use serde_json::json; - -use pact_verifier::{PactSource, ProviderInfo, ProviderTransport}; -use crate::shared_steps::{determine_content_type, element_text}; - -use crate::v4_steps::V4World; - -lazy_static!{ - pub static ref MESSAGES: Arc>> = Arc::new(Mutex::new(hashmap![])); -} - -pub fn setup_body(body: &String, message: &mut MessageContents) { 
- if !body.is_empty() { - if body.starts_with("JSON:") { - message.metadata.insert("contentType".to_string(), json!("application/json")); - message.contents = OptionalBody::Present(Bytes::from(body.strip_prefix("JSON:").unwrap_or(body).trim().to_string()), - Some(JSON.clone()), None); - } else if body.starts_with("XML:") { - message.metadata.insert("contentType".to_string(), json!("application/xml")); - message.contents = OptionalBody::Present(Bytes::from(body.strip_prefix("XML:").unwrap_or(body).trim().to_string()), - Some(XML.clone()), None); - } else if body.starts_with("file:") { - if body.ends_with("-body.xml") { - let file_name = body.strip_prefix("file:").unwrap_or(body).trim(); - let mut f = File::open(format!("pact-compatibility-suite/fixtures/{}", file_name)) - .expect(format!("could not load fixture '{}'", body).as_str()); - let mut buffer = Vec::new(); - f.read_to_end(&mut buffer) - .expect(format!("could not read fixture '{}'", body).as_str()); - let fixture = parse_bytes(buffer.as_slice()) - .expect(format!("could not parse fixture as XML: '{}'", body).as_str()); - let root = fixture.as_document().root(); - let body_node = root.children().iter().find_map(|n| n.element()).unwrap(); - let content_type = element_text(body_node, "contentType").unwrap_or("text/plain".to_string()); - message.metadata.insert("contentType".to_string(), json!(content_type)); - message.contents = OptionalBody::Present(Bytes::from(element_text(body_node, "contents").unwrap_or_default()), - ContentType::parse(content_type.as_str()).ok(), None); - } else { - let content_type = determine_content_type(body, message); - message.metadata.insert("contentType".to_string(), json!(content_type.to_string())); - - let file_name = body.strip_prefix("file:").unwrap_or(body).trim(); - let mut f = File::open(format!("pact-compatibility-suite/fixtures/{}", file_name)) - .expect(format!("could not load fixture '{}'", body).as_str()); - let mut buffer = Vec::new(); - f.read_to_end(&mut buffer) - .expect(format!("could not read fixture '{}'", body).as_str()); - message.contents = OptionalBody::Present(Bytes::from(buffer), - Some(content_type), None); - } - } else { - let content_type = determine_content_type(body, message); - message.metadata.insert("contentType".to_string(), json!(content_type.to_string())); - let body = Bytes::from(body.clone()); - message.contents = OptionalBody::Present(body, Some(content_type), None); - } - } -} - -#[given(expr = "a provider is started that can generate the {string} message with {string}")] -#[allow(deprecated)] -fn a_provider_is_started_that_can_generate_the_message( - world: &mut V4World, - name: String, - fixture: String -) { - let key = format!("{}:{}", world.scenario_id, name); - let mut message = AsynchronousMessage { - description: key.clone(), - .. AsynchronousMessage::default() - }; - setup_body(&fixture, &mut message.contents); - - { - let mut guard = MESSAGES.lock().unwrap(); - guard.insert(key, message); - } - - world.provider_info = ProviderInfo { - name: "p".to_string(), - host: "localhost".to_string(), - port: Some(world.message_proxy_port), - transports: vec![ProviderTransport { - port: Some(world.message_proxy_port), - .. ProviderTransport::default() - }], - .. 
ProviderInfo::default() - }; -} - -#[given(expr = "a Pact file for {string}:{string} is to be verified, but is marked pending")] -fn a_pact_file_for_is_to_be_verified_but_is_marked_pending( - world: &mut V4World, - name: String, - fixture: String -) { - let key = format!("{}:{}", world.scenario_id, name); - let mut message = AsynchronousMessage { - description: key.clone(), - pending: true, - .. AsynchronousMessage::default() - }; - setup_body(&fixture, &mut message.contents); - - let pact = V4Pact { - consumer: Consumer { name: format!("c_{}", name) }, - provider: Provider { name: "p".to_string() }, - interactions: vec![ message.boxed_v4() ], - .. V4Pact::default() - }; - world.sources.push(PactSource::String(pact.to_json(PactSpecification::V4).unwrap().to_string())); -} - -#[given(expr = "a Pact file for {string}:{string} is to be verified with the following comments:")] -fn a_pact_file_for_is_to_be_verified_with_the_following_comments( - world: &mut V4World, - step: &Step, - name: String, - fixture: String -) { - let key = format!("{}:{}", world.scenario_id, name); - let mut message = AsynchronousMessage { - description: key.clone(), - .. AsynchronousMessage::default() - }; - setup_body(&fixture, &mut message.contents); - - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - for row in table.rows.iter().skip(1) { - let data: HashMap = row.iter().enumerate().map(|(i, v)| (headers[i].clone(), v.clone())).collect(); - match data["type"].as_str() { - "text" => { - match message.comments.entry("text".to_string()) { - Entry::Occupied(mut entry) => { - let array = entry.get_mut().as_array_mut().unwrap(); - array.push(json!(data["comment"])); - } - Entry::Vacant(entry) => { - entry.insert(json!([ data["comment"] ])); - } - } - } - "testname" => { - message.comments.insert("testname".to_string(), json!(data["comment"])); - }, - _ => {} - } - } - } - - let pact = V4Pact { - consumer: Consumer { name: format!("c_{}", name) }, - provider: Provider { name: "p".to_string() }, - interactions: vec![ message.boxed_v4() ], - .. 
V4Pact::default() - }; - world.sources.push(PactSource::String(pact.to_json(PactSpecification::V4).unwrap().to_string())); -} diff --git a/compatibility-suite/tests/v4_steps/mod.rs b/compatibility-suite/tests/v4_steps/mod.rs deleted file mode 100644 index 4c3845295..000000000 --- a/compatibility-suite/tests/v4_steps/mod.rs +++ /dev/null @@ -1,172 +0,0 @@ -use std::collections::HashMap; -use std::panic::RefUnwindSafe; -use std::sync::{Arc, Mutex}; -use anyhow::anyhow; - -use cucumber::{then, when, World}; -use pact_models::bodies::OptionalBody; -use pact_models::generators::GeneratorTestMode; -use pact_models::json_utils::json_to_string; -use pact_models::pact::Pact; -use pact_models::PactSpecification; -use pact_models::v4::http_parts::{HttpRequest, HttpResponse}; -use pact_models::v4::interaction::V4Interaction; -use pact_models::v4::pact::V4Pact; -use pact_models::v4::sync_message::SynchronousMessage; -use serde_json::Value; - -use pact_consumer::builders::{ - InteractionBuilder, - MessageInteractionBuilder, - PactBuilder, - SyncMessageInteractionBuilder -}; -use pact_matching::{Mismatch, RequestMatchResult}; -use pact_mock_server::mock_server::MockServer; -use pact_verifier::{PactSource, ProviderInfo}; -use pact_verifier::verification_result::VerificationExecutionResult; -use crate::shared_steps::IndexType; - -mod http_consumer; -mod http_provider; -mod generators; -mod http_matching; -mod message_consumer; -pub(crate) mod message_provider; -mod sync_message_consumer; - -#[derive(Debug, World)] -pub struct V4World { - pub scenario_id: String, - pub request: HttpRequest, - pub generated_request: HttpRequest, - pub original_body: OptionalBody, - pub generated_body: OptionalBody, - pub generator_test_mode: GeneratorTestMode, - pub generator_context: HashMap, - pub builder: PactBuilder, - pub integration_builder: Option, - pub message_builder: Option, - pub sync_message_builder: Option, - pub pact: V4Pact, - pub pact_json: Value, - pub interactions: Vec>, - pub provider_key: String, - pub provider_server: Arc>, - pub provider_info: ProviderInfo, - pub sources: Vec, - pub verification_results: VerificationExecutionResult, - pub expected_response: HttpResponse, - pub received_responses: Vec, - pub response_results: Vec, - pub expected_request: HttpRequest, - pub received_requests: Vec, - pub request_results: Vec, - pub message_proxy_port: u16, - pub received_sync_messages: Vec -} - -impl Default for V4World { - fn default() -> Self { - V4World { - scenario_id: "".to_string(), - request: Default::default(), - generated_request: Default::default(), - original_body: Default::default(), - generated_body: Default::default(), - generator_test_mode: GeneratorTestMode::Consumer, - generator_context: Default::default(), - builder: PactBuilder::new_v4("C", "P"), - integration_builder: None, - message_builder: None, - sync_message_builder: None, - pact: Default::default(), - pact_json: Default::default(), - interactions: vec![], - provider_key: "".to_string(), - provider_server: Arc::new(Mutex::new(Default::default())), - provider_info: Default::default(), - sources: vec![], - verification_results: VerificationExecutionResult::new(), - expected_response: Default::default(), - received_responses: vec![], - response_results: vec![], - expected_request: Default::default(), - received_requests: vec![], - request_results: vec![], - message_proxy_port: 0, - received_sync_messages: vec![], - } - } -} - -#[when("the Pact file for the test is generated")] -fn the_pact_file_for_the_test_is_generated(world: &mut 
V4World) { - if let Some(integration_builder) = world.integration_builder.as_ref() { - world.builder.push_interaction(&integration_builder.build_v4()); - } - if let Some(message_builder) = world.message_builder.as_ref() { - world.builder.push_interaction(&message_builder.build()); - } - if let Some(message_builder) = world.sync_message_builder.as_ref() { - world.builder.push_interaction(&message_builder.build()); - } - world.pact = world.builder.build().as_v4_pact().unwrap(); - world.pact_json = world.pact.to_json(PactSpecification::V4).unwrap(); -} - -#[then(expr = "there will be an interaction in the Pact file with a type of {string}")] -fn there_will_be_an_interaction_in_the_pact_file_with_a_type_of( - world: &mut V4World, - i_type: String -) -> anyhow::Result<()> { - let interactions = world.pact_json["interactions"].as_array().unwrap(); - let interaction = interactions.iter().find(|i| { - json_to_string(i.get("type").unwrap_or(&Value::Null)) == i_type - }); - if let Some(_) = interaction { - Ok(()) - } else { - Err(anyhow!("Did not find interaction in Pact JSON with type attribute {}", i_type)) - } -} - -#[then(expr = "the {numType} interaction in the Pact file will have a type of {string}")] -fn the_interaction_in_the_pact_file_will_have_a_type_of( - world: &mut V4World, - index: IndexType, - i_type: String -) -> anyhow::Result<()> { - let interactions = world.pact_json["interactions"].as_array().unwrap(); - let interaction = interactions[index.val()].as_object().unwrap(); - if let Some(interaction_type) = interaction.get("type") { - if json_to_string(interaction_type) == i_type { - Ok(()) - } else { - Err(anyhow!("Expected interaction type attribute {} but got {}", i_type, interaction_type)) - } - } else { - Err(anyhow!("Interaction in Pact JSON has no type attribute")) - } -} - -#[then(expr = "the {numType} interaction in the Pact file will have {string} = {string}")] -fn the_first_interaction_in_the_pact_file_will_have( - world: &mut V4World, - index: IndexType, - name: String, - value: String -) -> anyhow::Result<()> { - let interactions = world.pact_json["interactions"].as_array().unwrap(); - let interaction = interactions[index.val()].as_object().unwrap(); - let json: Value = serde_json::from_str(value.as_str()).unwrap(); - if let Some(actual_value) = interaction.get(name.as_str()) { - if json == *actual_value { - Ok(()) - } else { - Err(anyhow!("Expected interaction {} attribute {} but got {}", name, value, actual_value)) - } - } else { - Err(anyhow!("Interaction in Pact JSON has no {} attribute", name)) - } -} diff --git a/compatibility-suite/tests/v4_steps/sync_message_consumer.rs b/compatibility-suite/tests/v4_steps/sync_message_consumer.rs deleted file mode 100644 index 348a90477..000000000 --- a/compatibility-suite/tests/v4_steps/sync_message_consumer.rs +++ /dev/null @@ -1,607 +0,0 @@ -use std::collections::HashMap; -use std::fs::File; -use std::io::{BufReader, Read}; -use std::path::PathBuf; -use anyhow::anyhow; -use cucumber::gherkin::Step; - -use cucumber::{given, then, when}; -use pact_models::generators::Generators; -use pact_models::http_parts::HttpPart; -use pact_models::pact::{Pact, read_pact}; -use pact_models::PactSpecification; -use pact_models::path_exp::DocPath; -use pact_models::v4::message_parts::MessageContents; -use serde_json::Value; - -use pact_consumer::builders::SyncMessageInteractionBuilder; -use crate::shared_steps::{assert_value_type, IndexType}; -use crate::v4_steps::message_provider::setup_body; - -use crate::v4_steps::V4World; - -#[given("a 
synchronous message interaction is being defined for a consumer test")] -fn a_synchronous_message_interaction_is_being_defined_for_a_consumer_test(world: &mut V4World) { - world.sync_message_builder = Some(SyncMessageInteractionBuilder::new("synchronous message interaction")); -} - -#[given(expr = "a key of {string} is specified for the synchronous message interaction")] -fn a_key_of_is_specified_for_the_synchronous_message_interaction(world: &mut V4World, key: String) { - let builder = world.sync_message_builder.as_mut().unwrap(); - builder.with_key(key); -} - -#[given("the synchronous message interaction is marked as pending")] -fn the_synchronous_message_interaction_is_marked_as_pending(world: &mut V4World) { - let builder = world.sync_message_builder.as_mut().unwrap(); - builder.pending(true); -} - -#[given(expr = "a comment {string} is added to the synchronous message interaction")] -fn a_comment_is_added_to_the_synchronous_message_interaction(world: &mut V4World, comment: String) { - let builder = world.sync_message_builder.as_mut().unwrap(); - builder.comment(comment); -} - -#[given(expr = "the message request payload contains the {string} JSON document")] -fn the_message_request_payload_contains_the_json_document( - world: &mut V4World, - fixture: String -) -> anyhow::Result<()> { - let mut fixture = File::open(format!("pact-compatibility-suite/fixtures/{}.json", fixture))?; - let mut buffer = Vec::new(); - fixture.read_to_end(&mut buffer)?; - - let builder = world.sync_message_builder.as_mut().unwrap(); - builder.request_body(buffer, Some("application/json".into())); - - Ok(()) -} - -#[given(expr = "the message response payload contains the {string} document")] -fn the_message_response_payload_contains_the_document( - world: &mut V4World, - fixture: String -) -> anyhow::Result<()> { - let mut response = MessageContents::default(); - setup_body(&fixture, &mut response); - let builder = world.sync_message_builder.as_mut().unwrap(); - builder.response_contents(&response); - Ok(()) -} - -#[given("the message request contains the following metadata:")] -fn the_message_request_contains_the_following_metadata(world: &mut V4World, step: &Step) { - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap().iter() - .enumerate() - .map(|(index, h)| (h.clone(), index)) - .collect::>(); - for values in table.rows.iter().skip(1) { - let key = values.get(*headers.get("key").unwrap()).unwrap(); - let value = values.get(*headers.get("value").unwrap()).unwrap(); - let json: Value = if value.starts_with("JSON:") { - serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value).trim()).unwrap() - } else { - Value::String(value.clone()) - }; - let builder = world.sync_message_builder.as_mut().unwrap(); - builder.request_metadata(key, json); - } - } -} - -#[given(expr = "a provider state {string} for the synchronous message is specified")] -fn a_provider_state_for_the_synchronous_message_is_specified( - world: &mut V4World, - state: String -) { - let builder = world.sync_message_builder.as_mut().unwrap(); - builder.given(state); -} - -#[given(expr = "a provider state {string} for the synchronous message is specified with the following data:")] -fn a_provider_state_for_the_synchronous_message_is_specified_with_the_following_data( - world: &mut V4World, - step: &Step, - state: String -) { - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap().iter() - .enumerate() - .map(|(index, h)| (index, h.clone())) - .collect::>(); - let params = 
table.rows.get(1).unwrap().iter().enumerate().map(|(i, v)| { - let key = headers.get(&i).unwrap(); - let json: Value = serde_json::from_str(v).unwrap(); - (key.clone(), json) - }).collect(); - let builder = world.sync_message_builder.as_mut().unwrap(); - builder.given_with_params(state, &Value::Object(params)); - } -} - -#[given("the message request is configured with the following:")] -fn the_message_request_is_configured_with_the_following( - world: &mut V4World, - step: &Step -) { - let builder = world.sync_message_builder.as_mut().unwrap(); - - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - let mut message = MessageContents::default(); - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - match field.as_str() { - "body" => { - setup_body(value, &mut message); - builder.request_contents(&message); - }, - "generators" => { - let json: Value = if value.starts_with("JSON:") { - serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value).trim()).unwrap() - } else { - let f = File::open(format!("pact-compatibility-suite/fixtures/{}", value)) - .expect(format!("could not load fixture '{}'", value).as_str()); - let reader = BufReader::new(f); - serde_json::from_reader(reader).unwrap() - }; - let mut generators = Generators::default(); - generators.load_from_map(json.as_object().unwrap()).unwrap(); - message.generators = generators.clone(); - } - "metadata" => { - let json: Value = serde_json::from_str(value).unwrap(); - message.metadata.extend(json.as_object().unwrap().iter().map(|(k, v)| (k.clone(), v.clone()))); - } - _ => {} - } - } - } - builder.request_contents(&message); - } -} - -#[given("the message response is configured with the following:")] -fn the_message_response_is_configured_with_the_following( - world: &mut V4World, - step: &Step -) { - let builder = world.sync_message_builder.as_mut().unwrap(); - - if let Some(table) = step.table.as_ref() { - let headers = table.rows.first().unwrap(); - let mut message = MessageContents::default(); - for (index, value) in table.rows.get(1).unwrap().iter().enumerate() { - if let Some(field) = headers.get(index) { - match field.as_str() { - "body" => { - setup_body(value, &mut message); - builder.request_contents(&message); - }, - "generators" => { - let json: Value = if value.starts_with("JSON:") { - serde_json::from_str(value.strip_prefix("JSON:").unwrap_or(value).trim()).unwrap() - } else { - let f = File::open(format!("pact-compatibility-suite/fixtures/{}", value)) - .expect(format!("could not load fixture '{}'", value).as_str()); - let reader = BufReader::new(f); - serde_json::from_reader(reader).unwrap() - }; - let mut generators = Generators::default(); - generators.load_from_map(json.as_object().unwrap()).unwrap(); - message.generators = generators.clone(); - } - "metadata" => { - let json: Value = serde_json::from_str(value).unwrap(); - message.metadata.extend(json.as_object().unwrap().iter().map(|(k, v)| (k.clone(), v.clone()))); - } - _ => {} - } - } - } - builder.response_contents(&message); - } -} - -#[when("the message is successfully processed")] -fn the_message_is_successfully_processed(world: &mut V4World) { - if let Some(integration_builder) = world.integration_builder.as_ref() { - world.builder.push_interaction(&integration_builder.build_v4()); - } - if let Some(message_builder) = world.message_builder.as_ref() { - world.builder.push_interaction(&message_builder.build()); - } - if let Some(message_builder) = 
world.sync_message_builder.as_ref() { - world.builder.push_interaction(&message_builder.build()); - } - world.pact = world.builder.build().as_v4_pact().unwrap(); - world.pact_json = world.pact.to_json(PactSpecification::V4).unwrap(); - let dir = PathBuf::from("target/compatibility-suite/v4").join(&world.scenario_id); - world.received_sync_messages = world.builder.with_output_dir(dir).synchronous_messages().collect(); -} - -#[then(expr = "the received message payload will contain the {string} document")] -fn the_received_message_payload_will_contain_the_document( - world: &mut V4World, - fixture: String -) -> anyhow::Result<()> { - let mut message = MessageContents::default(); - setup_body(&fixture, &mut message); - if world.received_sync_messages.iter().find(|m| { - if let Some(response) = m.response.first() { - response.contents.value() == message.contents.value() - } else { - false - } - }).is_some() { - Ok(()) - } else { - Err(anyhow!("The required message was not received")) - } -} - -#[then(expr = "the received message content type will be {string}")] -fn the_received_message_content_type_will_be( - world: &mut V4World, - content_type: String -) -> anyhow::Result<()> { - if world.received_sync_messages.iter().find(|m| { - if let Some(response) = m.response.first() { - response.contents.content_type().unwrap_or_default().to_string() == content_type - } else { - false - } - }).is_some() { - Ok(()) - } else { - Err(anyhow!("The required message was not received")) - } -} - -#[then("the consumer test will have passed")] -fn the_consumer_test_will_have_passed(_world: &mut V4World) { - // no-op -} - -#[then("a Pact file for the message interaction will have been written")] -fn a_pact_file_for_the_message_interaction_will_have_been_written(world: &mut V4World) -> anyhow::Result<()> { - let dir = PathBuf::from("target/compatibility-suite/v4").join(&world.scenario_id); - let pact_file = dir.join("C-P.json"); - if pact_file.exists() { - let pact = read_pact(&pact_file)?; - if pact.specification_version() == PactSpecification::V4 { - world.pact = pact.as_v4_pact().unwrap(); - Ok(()) - } else { - Err(anyhow!("Expected Pact file to be V4 Pact, but was {}", pact.specification_version())) - } - } else { - Err(anyhow!("No pact file found: {}", pact_file.to_string_lossy())) - } -} - -#[then(expr = "the pact file will contain {int} interaction")] -fn the_pact_file_will_contain_interaction(world: &mut V4World, num: usize) -> anyhow::Result<()> { - if world.pact.interactions.len() == num { - Ok(()) - } else { - Err(anyhow!("Pact had {} interactions", world.pact.interactions.len())) - } -} - -#[then(expr = "the first interaction in the pact file will contain the {string} document as the request")] -fn the_first_interaction_in_the_pact_file_will_contain_the_document_as_the_request( - world: &mut V4World, - fixture: String -) -> anyhow::Result<()> { - let mut message = MessageContents::default(); - setup_body(&fixture, &mut message); - let interaction = world.pact.interactions[0].as_v4_sync_message().unwrap(); - let result = if message.content_type().unwrap_or_default().is_json() { - let json1: Value = serde_json::from_slice(&*message.contents.value().unwrap_or_default()).unwrap(); - let json2: Value = serde_json::from_slice(&*interaction.request.contents.value().unwrap_or_default()).unwrap(); - json1 == json2 - } else { - interaction.request.contents == message.contents - }; - if result { - Ok(()) - } else { - Err(anyhow!("The required message was not found")) - } -} - -#[then(expr = "the first interaction 
in the pact file request content type will be {string}")] -fn the_first_interaction_in_the_pact_file_request_content_type_will_be( - world: &mut V4World, - content_type: String -) -> anyhow::Result<()> { - let interaction = world.pact.interactions[0].as_v4_sync_message().unwrap(); - if interaction.request.content_type().unwrap_or_default().to_string() == content_type { - Ok(()) - } else { - Err(anyhow!("The required message was not found")) - } -} - -#[then(expr = "the first interaction in the pact file will contain the {string} document as a response")] -fn the_first_interaction_in_the_pact_file_will_contain_the_document_as_a_response( - world: &mut V4World, - fixture: String -) -> anyhow::Result<()> { - let mut message = MessageContents::default(); - setup_body(&fixture, &mut message); - let interaction = world.pact.interactions[0].as_v4_sync_message().unwrap(); - let result = if message.content_type().unwrap_or_default().is_json() { - let json1: Value = serde_json::from_slice(&*message.contents.value().unwrap_or_default()).unwrap(); - let json2: Value = serde_json::from_slice(&*interaction.response[0].contents.value().unwrap_or_default()).unwrap(); - json1 == json2 - } else { - interaction.response[0].contents == message.contents - }; - if result { - Ok(()) - } else { - Err(anyhow!("The required message was not found")) - } -} - -#[then(expr = "the first interaction in the pact file response content type will be {string}")] -fn the_first_interaction_in_the_pact_file_response_content_type_will_be( - world: &mut V4World, - content_type: String -) -> anyhow::Result<()> { - let interaction = world.pact.interactions[0].as_v4_sync_message().unwrap(); - if interaction.response[0].content_type().unwrap_or_default().to_string() == content_type { - Ok(()) - } else { - Err(anyhow!("The required message was not found")) - } -} - -#[then(expr = "the first interaction in the pact file will contain {int} response messages")] -fn the_first_interaction_in_the_pact_file_will_contain_response_messages( - world: &mut V4World, - num: usize -) -> anyhow::Result<()> { - let interaction = world.pact.interactions[0].as_v4_sync_message().unwrap(); - if interaction.response.len() == num { - Ok(()) - } else { - Err(anyhow!("The message only had {} response messages", interaction.response.len())) - } -} - -#[then(expr = "the first interaction in the pact file will contain the {string} document as the {numType} response message")] -fn the_first_interaction_in_the_pact_file_will_contain_the_document_as_the_first_response_message( - world: &mut V4World, - fixture: String, - index: IndexType -) -> anyhow::Result<()> { - let mut message = MessageContents::default(); - setup_body(&fixture, &mut message); - let interaction = world.pact.interactions[0].as_v4_sync_message().unwrap(); - let result = if message.content_type().unwrap_or_default().is_json() { - let json1: Value = serde_json::from_slice(&*message.contents.value().unwrap_or_default()).unwrap(); - let json2: Value = serde_json::from_slice(&*interaction.response[index.val()].contents.value().unwrap_or_default()).unwrap(); - json1 == json2 - } else { - interaction.response[index.val()].contents == message.contents - }; - if result { - Ok(()) - } else { - Err(anyhow!("The required message was not found")) - } -} - -#[then(expr = "the first message in the pact file will contain the request message metadata {string} == {string}")] -fn the_first_message_in_the_pact_file_will_contain_the_request_message_metadata( - world: &mut V4World, - key: String, - value: String -) 
-> anyhow::Result<()> { - let json: Value = if value.starts_with("JSON:") { - let value_str = value.strip_prefix("JSON:") - .unwrap_or(value.as_str()) - .trim() - .replace("\\\"", "\""); - serde_json::from_str(value_str.as_str()).unwrap() - } else { - Value::String(value.clone()) - }; - let interaction = world.pact.interactions[0].as_v4_sync_message().unwrap(); - if json == *interaction.request.metadata.get(&key).unwrap() { - Ok(()) - } else { - Err(anyhow!("The required message was not received")) - } -} - -#[then(expr = "the first message in the pact file will contain {int} provider state(s)")] -fn the_first_message_in_the_pact_file_will_contain_provider_states( - world: &mut V4World, - states: usize -) -> anyhow::Result<()> { - let interaction = world.pact.interactions[0].as_v4_sync_message().unwrap(); - if interaction.provider_states.len() == states { - Ok(()) - } else { - Err(anyhow!("The message had {} provider states", interaction.provider_states.len())) - } -} - -#[then(expr = "the first message in the Pact file will contain provider state {string}")] -fn the_first_message_in_the_pact_file_will_contain_provider_state( - world: &mut V4World, - state: String -) -> anyhow::Result<()> { - let interaction = world.pact.interactions[0].as_v4_sync_message().unwrap(); - if interaction.provider_states.iter().find(|ps| ps.name == state).is_some() { - Ok(()) - } else { - Err(anyhow!("The message did not have '{}' provider state", state)) - } -} - -#[then(expr = "the provider state {string} for the message will contain the following parameters:")] -fn the_provider_state_for_the_message_will_contain_the_following_parameters( - world: &mut V4World, - step: &Step, - state: String -) -> anyhow::Result<()> { - let table = step.table.as_ref().unwrap(); - let params_str = table.rows.get(1).unwrap().first().unwrap(); - let params = serde_json::from_str::(params_str.as_str()) - .unwrap() - .as_object() - .unwrap() - .iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); - let interaction = world.pact.interactions[0].as_v4_sync_message().unwrap(); - let provider_state = interaction.provider_states.iter().find(|ps| ps.name == state).unwrap(); - if provider_state.params == params { - Ok(()) - } else { - Err(anyhow!("Expected provider state '{}' to have parameters {:?} but were {:?}", state, - params, provider_state.params)) - } -} - -#[then(expr = "the message request contents for {string} will have been replaced with a(n) {string}")] -fn the_message_contents_for_will_have_been_replaced_with_an( - world: &mut V4World, - path: String, - value_type: String -) -> anyhow::Result<()> { - let message = world.pact.interactions[0].as_v4_sync_message().unwrap(); - let path = DocPath::new(path).unwrap(); - let original_json: Value = serde_json::from_str(message.request.contents.value_as_string().unwrap().as_str()).unwrap(); - let original_element = original_json.pointer(path.as_json_pointer().unwrap().as_str()).unwrap(); - let json: Value = serde_json::from_str(world.received_sync_messages.first().unwrap().request.contents.value_as_string().unwrap().as_str()).unwrap(); - let element = json.pointer(path.as_json_pointer().unwrap().as_str()).unwrap(); - - if element == original_element { - return Err(anyhow!("Expected original ({:?}) to have been replaced", original_element)) - } - - assert_value_type(value_type, element) -} - -#[then(expr = "the message response contents for {string} will have been replaced with a(n) {string}")] -fn the_message_response_contents_for_will_have_been_replaced_with_an( - world: 
&mut V4World, - path: String, - value_type: String -) -> anyhow::Result<()> { - let message = world.pact.interactions[0].as_v4_sync_message().unwrap(); - let response = &message.response[0]; - let path = DocPath::new(path).unwrap(); - let original_json: Value = serde_json::from_str(response.contents.value_as_string().unwrap().as_str()).unwrap(); - let original_element = original_json.pointer(path.as_json_pointer().unwrap().as_str()).unwrap(); - let received_response = &world.received_sync_messages.first().unwrap().response[0]; - let json: Value = serde_json::from_str(received_response.contents.value_as_string().unwrap().as_str()).unwrap(); - let element = json.pointer(path.as_json_pointer().unwrap().as_str()).unwrap(); - - if element == original_element { - return Err(anyhow!("Expected original ({:?}) to have been replaced", original_element)) - } - - assert_value_type(value_type, element) -} - -#[then(expr = "the received message request metadata will contain {string} == {string}")] -fn the_received_message_request_metadata_will_contain( - world: &mut V4World, - key: String, - value: String -) -> anyhow::Result<()> { - let json: Value = if value.starts_with("JSON:") { - let value_str = value.strip_prefix("JSON:") - .unwrap_or(value.as_str()) - .trim() - .replace("\\\"", "\""); - serde_json::from_str(value_str.as_str()).unwrap() - } else { - Value::String(value.clone()) - }; - if world.received_sync_messages.iter().find(|m| { - if let Some(value) = m.request.metadata.get(&key) { - *value == json - } else { - false - } - }).is_some() { - Ok(()) - } else { - Err(anyhow!("The required message was not received")) - } -} - -#[then(expr = "the received message request metadata will contain {string} replaced with a(n) {string}")] -fn the_received_message_request_metadata_will_contain_replaced_with_an( - world: &mut V4World, - key: String, - value_type: String -) -> anyhow::Result<()> { - let message = world.pact.interactions[0].as_v4_sync_message().unwrap(); - let original_json = message.request.metadata.get(&key).unwrap(); - let received = &world.received_sync_messages.first().unwrap().request; - let json = received.metadata.get(&key).unwrap(); - - if json == original_json { - return Err(anyhow!("Expected original ({:?}) to have been replaced", original_json)) - } - - assert_value_type(value_type, json) -} - -#[then(expr = "the received message response metadata will contain {string} == {string}")] -fn the_received_message_response_metadata_will_contain( - world: &mut V4World, - key: String, - value: String -) -> anyhow::Result<()> { - let json: Value = if value.starts_with("JSON:") { - let value_str = value.strip_prefix("JSON:") - .unwrap_or(value.as_str()) - .trim() - .replace("\\\"", "\""); - serde_json::from_str(value_str.as_str()).unwrap() - } else { - Value::String(value.clone()) - }; - if world.received_sync_messages.iter().find(|m| { - if let Some(value) = m.response[0].metadata.get(&key) { - *value == json - } else { - false - } - }).is_some() { - Ok(()) - } else { - Err(anyhow!("The required message was not received")) - } -} - -#[then(expr = "the received message response metadata will contain {string} replaced with a(n) {string}")] -fn the_received_message_response_metadata_will_contain_replaced_with_an( - world: &mut V4World, - key: String, - value_type: String -) -> anyhow::Result<()> { - let message = world.pact.interactions[0].as_v4_sync_message().unwrap(); - let response = &message.response[0]; - let original_json = response.metadata.get(&key).unwrap(); - let received_response = 
&world.received_sync_messages.first().unwrap().response[0]; - let json = received_response.metadata.get(&key).unwrap(); - - if json == original_json { - return Err(anyhow!("Expected original ({:?}) to have been replaced", original_json)) - } - - assert_value_type(value_type, json) -} diff --git a/javascript/README.md b/javascript/README.md deleted file mode 100644 index 5e04cf4d0..000000000 --- a/javascript/README.md +++ /dev/null @@ -1,18 +0,0 @@ -To run the JavaScript examples, the pact_ffi Crate (which now also contains the -mock_server) first needs to be built using `cargo build` in the `rust/pact_ffi` -directory. - -This should result in the appropriate library file(s) being created for your OS, -i.e. `rust/target/debug/libpact_ffi.[dll|so|dylib]` - -1. run `npm install` -2. run `npm run simple_pact` - -**NOTE:** This example needs to run on Node 10. - -To change the log level, use the `RUST_LOG` environment variable. I.e., to set -debug level: `RUST_LOG=debug npm run simple_pact` - -To run the failing example: - - $ npm run simple_pact_error diff --git a/javascript/lib/simple_pact.js b/javascript/lib/simple_pact.js deleted file mode 100644 index cca18f9c4..000000000 --- a/javascript/lib/simple_pact.js +++ /dev/null @@ -1,90 +0,0 @@ -var ffi = require('ffi'); -var path = require('path'); -const http = require('http'); -const net = require('net'); -const url = require('url'); - -var dll = '../../rust/target/debug/libpact_ffi'; -var lib = ffi.Library(path.join(__dirname, dll), { - pactffi_create_mock_server: ['int32', ['string', 'string']], - pactffi_mock_server_matched: ['bool', ['int32']], - pactffi_cleanup_mock_server: ['bool', ['int32']] -}); - -var pact = `{ - "provider": { - "name": "Alice Service" - }, - "consumer": { - "name": "Consumer" - }, - "interactions": [ - { - "description": "a retrieve Mallory request", - "request": { - "method": "GET", - "path": "/mallory", - "query": "name=ron&status=good" - }, - "response": { - "status": 200, - "headers": { - "Content-Type": "text/html" - }, - "body": "That is some good Mallory." 
- } - } - ], - "metadata": { - "pact-specification": { - "version": "1.0.0" - }, - "pact-jvm": { - "version": "1.0.0" - } - } -}`; - -var port = lib.pactffi_create_mock_server(pact, '127.0.0.1:0'); -console.log('Mock server port=' + port); - -if (!lib.pactffi_mock_server_matched(port)) { - console.log("No requests yet, as expected"); -} else { - console.log("Hmm, something smells a bit off."); -} - -var options = { - hostname: 'localhost', - port: port, - path: '/mallory?name=ron&status=good', - method: 'GET', - headers: { - 'Content-Type': 'application/json' - } -}; - -var req = http.request(options, (res) => { - console.log(`STATUS: ${res.statusCode}`); - console.log(`HEADERS: ${JSON.stringify(res.headers)}`); - res.setEncoding('utf8'); - res.on('data', (chunk) => { - console.log(`BODY: ${chunk}`); - }); - res.on('end', () => { - console.log('No more data in response.'); - if (lib.pactffi_mock_server_matched(port)) { - console.log("Mock server matched all requests, Yay!"); - } else { - console.log("We got some mismatches, Boo!"); - } - - lib.pactffi_cleanup_mock_server(port); - }) -}); - -req.on('error', (e) => { - console.log(`problem with request: ${e.message}`); -}); - -req.end(); diff --git a/javascript/lib/simple_pact_error.js b/javascript/lib/simple_pact_error.js deleted file mode 100644 index f631317d2..000000000 --- a/javascript/lib/simple_pact_error.js +++ /dev/null @@ -1,167 +0,0 @@ -var ffi = require('ffi'); -var path = require('path'); -const http = require('http'); -const net = require('net'); -const url = require('url'); - -var dll = '../../rust/target/debug/libpact_ffi'; -var lib = ffi.Library(path.join(__dirname, dll), { - pactffi_create_mock_server: ['int32', ['string', 'string']], - pactffi_mock_server_matched: ['bool', ['int32']], - pactffi_mock_server_mismatches: ['string', ['int32']], - pactffi_cleanup_mock_server: ['bool', ['int32']] -}); - -var pact = `{ - "provider": { - "name": "test_provider" - }, - "consumer": { - "name": "test_consumer" - }, - "interactions": [ - { - "providerState": "test state", - "description": "test interaction", - "request": { - "method": "POST", - "path": "/", - "body": { - "complete": { - "certificateUri": "http://...", - "issues": { - "idNotFound": {} - }, - "nevdis": { - "body": null, - "colour": null, - "engine": null - }, - "body": 123456 - }, - "body": [ - 1, - 2, - 3 - ] - } - }, - "response": { - "status": 200 - } - } - ], - "metadata": { - "pact-specification": { - "version": "2.0.0" - }, - "pact-jvm": { - "version": "" - } - } -}`; - -var port = lib.pactffi_create_mock_server(pact, '127.0.0.1:0'); -console.log("Mock server port=" + port); - -if (!lib.pactffi_mock_server_matched(port)) { - console.log("No requests yet, as expected"); -} else { - console.log("Hmm, something smells a bit off."); -} - -var options = { - hostname: 'localhost', - port: port, - path: '/', - method: 'POST', - headers: { - 'Content-Type': 'application/json' - } -}; - -var request1_done = false; -var request2_done = false; - -var req1 = http.request(options, (res) => { - console.log(`STATUS: ${res.statusCode}`); - console.log(`HEADERS: ${JSON.stringify(res.headers)}`); - res.setEncoding('utf8'); - res.on('data', (chunk) => { - console.log(`BODY: ${chunk}`); - }); - res.on('end', () => { - console.log('No more data in response.'); - request1_done = true; - }) -}); - -req1.on('error', (e) => { - console.log(`problem with request: ${e.message}`); - request1_done = true; -}); -req1.write(JSON.stringify({ - "complete": { - "certificateUri": "http://...", - 
"issues": {}, - "nevdis": { - "body": "red", - "colour": null, - "engine": null - }, - "body": "123456" - }, - "body": [1, 3] -})); -req1.end(); - -var options2 = { - hostname: 'localhost', - port: port, - path: '/mallory', - method: 'PUT', - headers: { - 'Content-Type': 'application/json' - } -}; - -var req2 = http.request(options2, (res) => { - console.log(`STATUS: ${res.statusCode}`); - console.log(`HEADERS: ${JSON.stringify(res.headers)}`); - res.setEncoding('utf8'); - res.on('data', (chunk) => { - console.log(`BODY: ${chunk}`); - }); - res.on('end', () => { - console.log('No more data in response.'); - request2_done = true; - }) -}); - -req2.on('error', (e) => { - console.log(`problem with request: ${e.message}`); - request2_done = true; -}); -req2.write(JSON.stringify({})); -req2.end(); - -var waitForResult; -waitForResult = function () { - console.log('.'); - if (!request1_done || !request2_done) { - setTimeout(waitForResult, 1000); - } else { - console.log("-----------------------------------------------"); - if (lib.pactffi_mock_server_matched(port)) { - console.log("Mock server matched all requests, That Is Not Good (tm)"); - } else { - console.log("We got some mismatches, as expected."); - var mismatch_json = lib.pactffi_mock_server_mismatches(port); - console.log(mismatch_json); - console.log(); - console.log(JSON.stringify(JSON.parse(mismatch_json), null, 4)); - } - lib.pactffi_cleanup_mock_server(port); - } -}; -setTimeout(waitForResult, 1000); diff --git a/javascript/package.json b/javascript/package.json deleted file mode 100644 index d473475a4..000000000 --- a/javascript/package.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "name": "pact-reference-js", - "version": "1.0.0", - "description": "Reference implementation of calling pact mock server from JS", - "main": "index.js", - "dependencies": { - "ffi": "^2.3.0" - }, - "devDependencies": {}, - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1", - "simple_pact": "node lib/simple_pact.js", - "simple_pact_error": "node lib/simple_pact_error.js" - }, - "author": "", - "license": "ISC" -} diff --git a/javascript/yarn.lock b/javascript/yarn.lock deleted file mode 100644 index 538d7d5ab..000000000 --- a/javascript/yarn.lock +++ /dev/null @@ -1,51 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - -bindings@1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.3.0.tgz#b346f6ecf6a95f5a815c5839fc7cdb22502f1ed7" - -bindings@~1.2.0: - version "1.2.1" - resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.2.1.tgz#14ad6113812d2d37d72e67b4cacb4bb726505f11" - -debug@2: - version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - dependencies: - ms "2.0.0" - -ffi@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/ffi/-/ffi-2.3.0.tgz#fa1a7b3d85c0fa8c83d96947a64b5192bc47f858" - integrity sha512-vkPA9Hf9CVuQ5HeMZykYvrZF2QNJ/iKGLkyDkisBnoOOFeFXZQhUPxBARPBIZMJVulvBI2R+jgofW03gyPpJcQ== - dependencies: - bindings "~1.2.0" - debug "2" - nan "2" - ref "1" - ref-struct "1" - -ms@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - -nan@2: - version "2.10.0" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.10.0.tgz#96d0cd610ebd58d4b4de9cc0c6828cda99c7548f" - -ref-struct@1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/ref-struct/-/ref-struct-1.1.0.tgz#5d5ee65ad41cefc3a5c5feb40587261e479edc13" - dependencies: - debug "2" - ref "1" - -ref@1: - version "1.3.5" - resolved "https://registry.yarnpkg.com/ref/-/ref-1.3.5.tgz#0e33f080cdb94a3d95312b2b3b1fd0f82044ca0f" - dependencies: - bindings "1" - debug "2" - nan "2" diff --git a/php/README.md b/php/README.md deleted file mode 100644 index 56e62c484..000000000 --- a/php/README.md +++ /dev/null @@ -1,15 +0,0 @@ -To run the php examples, the mock server DLL needs to be built using `cargo build` in the `rust/libpact_ffi` directory. - -1. run `composer install` -2. run consumers - 1. `composer consumer-1-matches` - 2. `composer consumer-1-mismatches` - 3. `composer consumer-2-matches` - 4. `composer consumer-2-mismatches` -3. run provider - 1. `composer provider` - -**NOTE:** This example needs to run on PHP >= 7.4. - -To change the log level, use the `LOG_LEVEL` environment variable. 
I.e., to set -debug level: `LOG_LEVEL=debug composer consumer-1-matches` diff --git a/php/composer.json b/php/composer.json deleted file mode 100644 index f1887ffbc..000000000 --- a/php/composer.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "name": "pact-reference/php", - "description": "Reference implementation of calling pact mock server from PHP", - "license": "MIT", - "authors": [ - { - "name": "Tien Vo", - "email": "tien.xuan.vo@gmail.com" - } - ], - "require": { - "php": ">=7.4", - "ext-ffi": "*", - "ext-json": "*", - "symfony/http-client": "^5.3", - "slim/slim": "^4.8", - "slim/psr7": "^1.4", - "symfony/process": "^5.3" - }, - "scripts": { - "consumer-1-matches": "MATCHING=1 php src/consumer-1.php", - "consumer-1-mismatches": "MATCHING=0 php src/consumer-1.php", - "consumer-2-matches": "MATCHING=1 php src/consumer-2.php", - "consumer-2-mismatches": "MATCHING=0 php src/consumer-2.php", - "consumer-plugin": "MATCHING=1 php src/consumer-plugin.php", - "provider": "php src/provider.php" - } -} diff --git a/php/pacts/area-calculator-consumer-area-calculator-provider.json b/php/pacts/area-calculator-consumer-area-calculator-provider.json deleted file mode 100644 index 7807ac937..000000000 --- a/php/pacts/area-calculator-consumer-area-calculator-provider.json +++ /dev/null @@ -1,102 +0,0 @@ -{ - "consumer": { - "name": "area-calculator-consumer" - }, - "interactions": [ - { - "description": "A gRPC calculateOne request", - "interactionMarkup": { - "markup": "```protobuf\nmessage AreaResponse {\n repeated float value = 1;\n}\n```\n", - "markupType": "COMMON_MARK" - }, - "key": "ab98c08a2562ef2e", - "pending": false, - "pluginConfiguration": { - "protobuf": { - "descriptorKey": "a85dff8f82655a9681aad113575dcfbb", - "service": "Calculator/calculateOne" - } - }, - "transport": "grpc", - "request": { - "contents": { - "content": "EgoNAABAQBUAAIBA", - "contentType": "application/protobuf;message=ShapeMessage", - "contentTypeHint": "BINARY", - "encoded": "base64" - }, - "matchingRules": { - "body": { - "$.rectangle.length": { - "combine": "AND", - "matchers": [ - { - "match": "number" - } - ] - }, - "$.rectangle.width": { - "combine": "AND", - "matchers": [ - { - "match": "number" - } - ] - } - } - } - }, - "response": [ - { - "contents": { - "content": "CgQAAEBB", - "contentType": "application/protobuf;message=AreaResponse", - "contentTypeHint": "BINARY", - "encoded": "base64" - }, - "matchingRules": { - "body": { - "$.value[0].*": { - "combine": "AND", - "matchers": [ - { - "match": "number" - } - ] - } - } - } - } - ], - "type": "Synchronous/Messages" - } - ], - "metadata": { - "pact-deno-ffi": { - "ffi": "0.3.15" - }, - "pactRust": { - "ffi": "0.3.15", - "mockserver": "0.9.6", - "models": "1.0.1" - }, - "pactSpecification": { - "version": "4.0" - }, - "plugins": [ - { - "configuration": { - "a85dff8f82655a9681aad113575dcfbb": { - "protoDescriptors": 
"CsoHChVhcmVhX2NhbGN1bGF0b3IucHJvdG8SD2FyZWFfY2FsY3VsYXRvciK6AgoMU2hhcGVNZXNzYWdlEjEKBnNxdWFyZRgBIAEoCzIXLmFyZWFfY2FsY3VsYXRvci5TcXVhcmVIAFIGc3F1YXJlEjoKCXJlY3RhbmdsZRgCIAEoCzIaLmFyZWFfY2FsY3VsYXRvci5SZWN0YW5nbGVIAFIJcmVjdGFuZ2xlEjEKBmNpcmNsZRgDIAEoCzIXLmFyZWFfY2FsY3VsYXRvci5DaXJjbGVIAFIGY2lyY2xlEjcKCHRyaWFuZ2xlGAQgASgLMhkuYXJlYV9jYWxjdWxhdG9yLlRyaWFuZ2xlSABSCHRyaWFuZ2xlEkYKDXBhcmFsbGVsb2dyYW0YBSABKAsyHi5hcmVhX2NhbGN1bGF0b3IuUGFyYWxsZWxvZ3JhbUgAUg1wYXJhbGxlbG9ncmFtQgcKBXNoYXBlIikKBlNxdWFyZRIfCgtlZGdlX2xlbmd0aBgBIAEoAlIKZWRnZUxlbmd0aCI5CglSZWN0YW5nbGUSFgoGbGVuZ3RoGAEgASgCUgZsZW5ndGgSFAoFd2lkdGgYAiABKAJSBXdpZHRoIiAKBkNpcmNsZRIWCgZyYWRpdXMYASABKAJSBnJhZGl1cyJPCghUcmlhbmdsZRIVCgZlZGdlX2EYASABKAJSBWVkZ2VBEhUKBmVkZ2VfYhgCIAEoAlIFZWRnZUISFQoGZWRnZV9jGAMgASgCUgVlZGdlQyJICg1QYXJhbGxlbG9ncmFtEh8KC2Jhc2VfbGVuZ3RoGAEgASgCUgpiYXNlTGVuZ3RoEhYKBmhlaWdodBgCIAEoAlIGaGVpZ2h0IkQKC0FyZWFSZXF1ZXN0EjUKBnNoYXBlcxgBIAMoCzIdLmFyZWFfY2FsY3VsYXRvci5TaGFwZU1lc3NhZ2VSBnNoYXBlcyIkCgxBcmVhUmVzcG9uc2USFAoFdmFsdWUYASADKAJSBXZhbHVlMq0BCgpDYWxjdWxhdG9yEk4KDGNhbGN1bGF0ZU9uZRIdLmFyZWFfY2FsY3VsYXRvci5TaGFwZU1lc3NhZ2UaHS5hcmVhX2NhbGN1bGF0b3IuQXJlYVJlc3BvbnNlIgASTwoOY2FsY3VsYXRlTXVsdGkSHC5hcmVhX2NhbGN1bGF0b3IuQXJlYVJlcXVlc3QaHS5hcmVhX2NhbGN1bGF0b3IuQXJlYVJlc3BvbnNlIgBCHFoXaW8ucGFjdC9hcmVhX2NhbGN1bGF0b3LQAgFiBnByb3RvMw==", - "protoFile": "syntax = \"proto3\";\n\npackage area_calculator;\n\noption php_generic_services = true;\noption go_package = \"io.pact/area_calculator\";\n\nservice Calculator {\n rpc calculateOne (ShapeMessage) returns (AreaResponse) {}\n rpc calculateMulti (AreaRequest) returns (AreaResponse) {}\n}\n\nmessage ShapeMessage {\n oneof shape {\n Square square = 1;\n Rectangle rectangle = 2;\n Circle circle = 3;\n Triangle triangle = 4;\n Parallelogram parallelogram = 5;\n }\n}\n\nmessage Square {\n float edge_length = 1;\n}\n\nmessage Rectangle {\n float length = 1;\n float width = 2;\n}\n\nmessage Circle {\n float radius = 1;\n}\n\nmessage Triangle {\n float edge_a = 1;\n float edge_b = 2;\n float edge_c = 3;\n}\n\nmessage Parallelogram {\n float base_length = 1;\n float height = 2;\n}\n\nmessage AreaRequest {\n repeated ShapeMessage shapes = 1;\n}\n\nmessage AreaResponse {\n repeated float value = 1;\n}\n" - } - }, - "name": "protobuf", - "version": "0.2.0" - } - ] - }, - "provider": { - "name": "area-calculator-provider" - } -} \ No newline at end of file diff --git a/php/pacts/grpc-consumer-php-area-calculator-provider.json b/php/pacts/grpc-consumer-php-area-calculator-provider.json deleted file mode 100644 index 475995071..000000000 --- a/php/pacts/grpc-consumer-php-area-calculator-provider.json +++ /dev/null @@ -1,102 +0,0 @@ -{ - "consumer": { - "name": "grpc-consumer-php" - }, - "interactions": [ - { - "description": "A gRPC calculateOne request", - "interactionMarkup": { - "markup": "```protobuf\nmessage AreaResponse {\n repeated float value = 1;\n}\n```\n", - "markupType": "COMMON_MARK" - }, - "key": "ab98c08a2562ef2e", - "pending": false, - "pluginConfiguration": { - "protobuf": { - "descriptorKey": "a85dff8f82655a9681aad113575dcfbb", - "service": "Calculator/calculateOne" - } - }, - "request": { - "contents": { - "content": "EgoNAABAQBUAAIBA", - "contentType": "application/protobuf;message=ShapeMessage", - "contentTypeHint": "BINARY", - "encoded": "base64" - }, - "matchingRules": { - "body": { - "$.rectangle.length": { - "combine": "AND", - "matchers": [ - { - "match": "number" - } - ] - }, - "$.rectangle.width": { - "combine": "AND", - "matchers": [ - { - "match": "number" - } - ] - } - } - } - }, - 
"response": [ - { - "contents": { - "content": "CgQAAEBB", - "contentType": "application/protobuf;message=AreaResponse", - "contentTypeHint": "BINARY", - "encoded": "base64" - }, - "matchingRules": { - "body": { - "$.value[0].*": { - "combine": "AND", - "matchers": [ - { - "match": "number" - } - ] - } - } - } - } - ], - "transport": "grpc", - "type": "Synchronous/Messages" - } - ], - "metadata": { - "pact-php": { - "ffi": "0.3.14" - }, - "pactRust": { - "ffi": "0.3.14", - "mockserver": "0.9.5", - "models": "1.0.0" - }, - "pactSpecification": { - "version": "4.0" - }, - "plugins": [ - { - "configuration": { - "a85dff8f82655a9681aad113575dcfbb": { - "protoDescriptors": "CsoHChVhcmVhX2NhbGN1bGF0b3IucHJvdG8SD2FyZWFfY2FsY3VsYXRvciK6AgoMU2hhcGVNZXNzYWdlEjEKBnNxdWFyZRgBIAEoCzIXLmFyZWFfY2FsY3VsYXRvci5TcXVhcmVIAFIGc3F1YXJlEjoKCXJlY3RhbmdsZRgCIAEoCzIaLmFyZWFfY2FsY3VsYXRvci5SZWN0YW5nbGVIAFIJcmVjdGFuZ2xlEjEKBmNpcmNsZRgDIAEoCzIXLmFyZWFfY2FsY3VsYXRvci5DaXJjbGVIAFIGY2lyY2xlEjcKCHRyaWFuZ2xlGAQgASgLMhkuYXJlYV9jYWxjdWxhdG9yLlRyaWFuZ2xlSABSCHRyaWFuZ2xlEkYKDXBhcmFsbGVsb2dyYW0YBSABKAsyHi5hcmVhX2NhbGN1bGF0b3IuUGFyYWxsZWxvZ3JhbUgAUg1wYXJhbGxlbG9ncmFtQgcKBXNoYXBlIikKBlNxdWFyZRIfCgtlZGdlX2xlbmd0aBgBIAEoAlIKZWRnZUxlbmd0aCI5CglSZWN0YW5nbGUSFgoGbGVuZ3RoGAEgASgCUgZsZW5ndGgSFAoFd2lkdGgYAiABKAJSBXdpZHRoIiAKBkNpcmNsZRIWCgZyYWRpdXMYASABKAJSBnJhZGl1cyJPCghUcmlhbmdsZRIVCgZlZGdlX2EYASABKAJSBWVkZ2VBEhUKBmVkZ2VfYhgCIAEoAlIFZWRnZUISFQoGZWRnZV9jGAMgASgCUgVlZGdlQyJICg1QYXJhbGxlbG9ncmFtEh8KC2Jhc2VfbGVuZ3RoGAEgASgCUgpiYXNlTGVuZ3RoEhYKBmhlaWdodBgCIAEoAlIGaGVpZ2h0IkQKC0FyZWFSZXF1ZXN0EjUKBnNoYXBlcxgBIAMoCzIdLmFyZWFfY2FsY3VsYXRvci5TaGFwZU1lc3NhZ2VSBnNoYXBlcyIkCgxBcmVhUmVzcG9uc2USFAoFdmFsdWUYASADKAJSBXZhbHVlMq0BCgpDYWxjdWxhdG9yEk4KDGNhbGN1bGF0ZU9uZRIdLmFyZWFfY2FsY3VsYXRvci5TaGFwZU1lc3NhZ2UaHS5hcmVhX2NhbGN1bGF0b3IuQXJlYVJlc3BvbnNlIgASTwoOY2FsY3VsYXRlTXVsdGkSHC5hcmVhX2NhbGN1bGF0b3IuQXJlYVJlcXVlc3QaHS5hcmVhX2NhbGN1bGF0b3IuQXJlYVJlc3BvbnNlIgBCHFoXaW8ucGFjdC9hcmVhX2NhbGN1bGF0b3LQAgFiBnByb3RvMw==", - "protoFile": "syntax = \"proto3\";\n\npackage area_calculator;\n\noption php_generic_services = true;\noption go_package = \"io.pact/area_calculator\";\n\nservice Calculator {\n rpc calculateOne (ShapeMessage) returns (AreaResponse) {}\n rpc calculateMulti (AreaRequest) returns (AreaResponse) {}\n}\n\nmessage ShapeMessage {\n oneof shape {\n Square square = 1;\n Rectangle rectangle = 2;\n Circle circle = 3;\n Triangle triangle = 4;\n Parallelogram parallelogram = 5;\n }\n}\n\nmessage Square {\n float edge_length = 1;\n}\n\nmessage Rectangle {\n float length = 1;\n float width = 2;\n}\n\nmessage Circle {\n float radius = 1;\n}\n\nmessage Triangle {\n float edge_a = 1;\n float edge_b = 2;\n float edge_c = 3;\n}\n\nmessage Parallelogram {\n float base_length = 1;\n float height = 2;\n}\n\nmessage AreaRequest {\n repeated ShapeMessage shapes = 1;\n}\n\nmessage AreaResponse {\n repeated float value = 1;\n}" - } - }, - "name": "protobuf", - "version": "0.1.17" - } - ] - }, - "provider": { - "name": "area-calculator-provider" - } -} \ No newline at end of file diff --git a/php/pacts/http-consumer-1-http-provider.json b/php/pacts/http-consumer-1-http-provider.json deleted file mode 100644 index 3c6187f16..000000000 --- a/php/pacts/http-consumer-1-http-provider.json +++ /dev/null @@ -1,151 +0,0 @@ -{ - "consumer": { - "name": "http-consumer-1" - }, - "interactions": [ - { - "description": "A POST request to create book", - "providerStates": [ - { - "name": "No book fixtures required" - } - ], - "request": { - "body": { - "author": "Margaret Atwood", - 
"description": "Brilliantly conceived and executed, this powerful evocation of twenty-first century America gives full rein to Margaret Atwood's devastating irony, wit and astute perception.", - "isbn": "0099740915", - "publicationDate": "1985-07-31T00:00:00+00:00", - "title": "The Handmaid's Tale" - }, - "headers": { - "Content-Type": "application/json" - }, - "matchingRules": { - "body": { - "$.author": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.description": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.isbn": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.publicationDate": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z)$" - } - ] - }, - "$.title": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - } - }, - "header": {} - }, - "method": "POST", - "path": "/api/books" - }, - "response": { - "body": { - "@context": "/api/contexts/Book", - "@id": "/api/books/0114b2a8-3347-49d8-ad99-0e792c5a30e6", - "@type": "Book", - "author": "Melisa Kassulke", - "description": "Quaerat odit quia nisi accusantium natus voluptatem. Explicabo corporis eligendi ut ut sapiente ut qui quidem. Optio amet velit aut delectus. Sed alias asperiores perspiciatis deserunt omnis. Mollitia unde id in.", - "publicationDate": "1999-02-13T00:00:00+07:00", - "reviews": [], - "title": "Voluptas et tempora repellat corporis excepturi." - }, - "headers": { - "Content-Type": "application/ld+json; charset=utf-8" - }, - "matchingRules": { - "body": { - "$.author": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.description": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.publicationDate": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z)$" - } - ] - }, - "$.title": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$['@id']": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": "^\\/api\\/books\\/[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$" - } - ] - } - }, - "header": {} - }, - "status": 201 - } - } - ], - "metadata": { - "pactRust": { - "ffi": "0.3.14", - "mockserver": "0.9.5", - "models": "1.0.0" - }, - "pactSpecification": { - "version": "3.0.0" - } - }, - "provider": { - "name": "http-provider" - } -} \ No newline at end of file diff --git a/php/pacts/http-consumer-2-http-provider.json b/php/pacts/http-consumer-2-http-provider.json deleted file mode 100644 index 227a5e436..000000000 --- a/php/pacts/http-consumer-2-http-provider.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "consumer": { - "name": "http-consumer-2" - }, - "interactions": [ - { - "description": "A PUT request to generate book cover", - "providerStates": [ - { - "name": "A book with id fb5a885f-f7e8-4a50-950f-c1a64a94d500 is required" - } - ], - "request": { - "body": [], - "headers": { - "Content-Type": "application/json" - }, - "method": "PUT", - "path": "/api/books/fb5a885f-f7e8-4a50-950f-c1a64a94d500/generate-cover" - }, - "response": { - "status": 204 - } - } - ], - "metadata": { - "pactRust": { - "ffi": "0.3.14", - "mockserver": "0.9.5", - "models": "1.0.0" - }, - "pactSpecification": { - "version": "3.0.0" - } - }, - "provider": { - "name": "http-provider" - } -} \ No newline at end of file diff --git 
a/php/pacts/message-consumer-2-message-provider.json b/php/pacts/message-consumer-2-message-provider.json deleted file mode 100644 index 27138795c..000000000 --- a/php/pacts/message-consumer-2-message-provider.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "consumer": { - "name": "message-consumer-2" - }, - "messages": [ - { - "contents": { - "uuid": "fb5a885f-f7e8-4a50-950f-c1a64a94d500" - }, - "description": "Book (id fb5a885f-f7e8-4a50-950f-c1a64a94d500) created message", - "matchingRules": { - "body": { - "$.uuid": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": "^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$" - } - ] - } - } - }, - "metadata": { - "contentType": "application/json" - }, - "providerStates": [ - { - "name": "A book with id fb5a885f-f7e8-4a50-950f-c1a64a94d500 is required" - } - ] - } - ], - "metadata": { - "pactRust": { - "ffi": "0.3.14", - "models": "1.0.0" - }, - "pactSpecification": { - "version": "3.0.0" - } - }, - "provider": { - "name": "message-provider" - } -} \ No newline at end of file diff --git a/php/public/index.php b/php/public/index.php deleted file mode 100644 index c034ba6a9..000000000 --- a/php/public/index.php +++ /dev/null @@ -1,34 +0,0 @@ -post('/api/books', function (Request $request, Response $response, $args) { - $response->getBody()->write(json_encode([ - '@context' => '/api/contexts/Book', - '@id' => '/api/books/bb50b187-ff02-422c-886f-b58dc4e0adca', - '@type' => 'Book', - 'title' => 'Lorem ipsum dolor sit amet.', - 'description' => 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Mauris a neque erat. Donec laoreet justo.', - 'author' => 'Mrs. Samanta Gerhold', - 'publicationDate' => '2002-05-26T11:41:12+07:00', - 'reviews' => [], - ])); - - return $response - ->withHeader('Content-Type', 'application/ld+json; charset=utf-8') - ->withStatus(201); -}); - -$app->put('/api/books/{id}/generate-cover', function (Request $request, Response $response, $args) { - $response->getBody()->write('[]'); - - return $response->withStatus(204); -}); - -$app->run(); diff --git a/php/public/proxy.php b/php/public/proxy.php deleted file mode 100644 index ab389437b..000000000 --- a/php/public/proxy.php +++ /dev/null @@ -1,52 +0,0 @@ -addBodyParsingMiddleware(); - -$app->post('/', function(Request $request, Response $response) { - $body = $request->getParsedBody(); - switch ($body['description']) { - case 'Book (id fb5a885f-f7e8-4a50-950f-c1a64a94d500) created message': - $response->getBody()->write(json_encode([ - 'uuid' => '90d0f930-b1c6-48b6-b351-88f6c2b5aa9e', - ])); - return $response->withHeader('Content-Type', 'application/json'); - - default: - break; - } - // What to do with $body['providerStates'] ? - - return $response; -}); - -$app->post('/change-state', function(Request $request, Response $response) { - $body = $request->getParsedBody(); - switch ($body['state']) { - case 'A book with id fb5a885f-f7e8-4a50-950f-c1a64a94d500 is required': - if (($body['action'] ?? 
null) === 'teardown') { - error_log('Removing book with id fb5a885f-f7e8-4a50-950f-c1a64a94d500...'); - } else { - error_log('Creating book with id fb5a885f-f7e8-4a50-950f-c1a64a94d500...'); - } - break; - - default: - break; - } - - return $response; -}); - -try { - $app->run(); -} catch (HttpNotFoundException $exception) { - return false; -} diff --git a/php/src/consumer-1.php b/php/src/consumer-1.php deleted file mode 100644 index 82d8ee809..000000000 --- a/php/src/consumer-1.php +++ /dev/null @@ -1,120 +0,0 @@ - -// $code = file_get_contents(posix_getpwnam(get_current_user())['dir'] . '/.pact/ffi/pact.h'); -// $ffi = FFI::cdef($code, posix_getpwnam(get_current_user())['dir'] . '/.pact/ffi/osxaarch64/libpact_ffi.dylib'); - -$ffi->pactffi_init('LOG_LEVEL'); - -$pact = $ffi->pactffi_new_pact('http-consumer-1', 'http-provider'); -$ffi->pactffi_with_specification($pact, $ffi->PactSpecification_V3); - -$interaction = $ffi->pactffi_new_interaction($pact, 'A POST request to create book'); -$ffi->pactffi_upon_receiving($interaction, 'A POST request to create book'); -$ffi->pactffi_given($interaction, 'No book fixtures required'); -$ffi->pactffi_with_request($interaction, 'POST', '/api/books'); -$ffi->pactffi_with_header($interaction, $ffi->InteractionPart_Request, 'Content-Type', 0, 'application/json'); -$ffi->pactffi_with_body($interaction, $ffi->InteractionPart_Request, 'application/json', '{ - "isbn": { - "pact:matcher:type": "type", - "value": "0099740915" - }, - "title": { - "pact:matcher:type": "type", - "value": "The Handmaid\'s Tale" - }, - "description": { - "pact:matcher:type": "type", - "value": "Brilliantly conceived and executed, this powerful evocation of twenty-first century America gives full rein to Margaret Atwood\'s devastating irony, wit and astute perception." - }, - "author": { - "pact:matcher:type": "type", - "value": "Margaret Atwood" - }, - "publicationDate": { - "pact:matcher:type": "regex", - "regex": "^\\\\d{4}-[01]\\\\d-[0-3]\\\\dT[0-2]\\\\d:[0-5]\\\\d:[0-5]\\\\d([+-][0-2]\\\\d:[0-5]\\\\d|Z)$", - "value": "1985-07-31T00:00:00+00:00" - } - }'); -$ffi->pactffi_response_status($interaction, 201); -$ffi->pactffi_with_header($interaction, $ffi->InteractionPart_Response, 'Content-Type', 0, 'application/ld+json; charset=utf-8'); -$ffi->pactffi_with_body($interaction, $ffi->InteractionPart_Response, 'application/ld+json; charset=utf-8', '{ - "@context": "/api/contexts/Book", - "@id": { - "pact:matcher:type": "regex", - "regex": "^\\\\/api\\\\/books\\\\/[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$", - "value": "/api/books/0114b2a8-3347-49d8-ad99-0e792c5a30e6" - }, - "@type": "Book", - "title": { - "pact:matcher:type": "type", - "value": "Voluptas et tempora repellat corporis excepturi." - }, - "description": { - "pact:matcher:type": "type", - "value": "Quaerat odit quia nisi accusantium natus voluptatem. Explicabo corporis eligendi ut ut sapiente ut qui quidem. Optio amet velit aut delectus. Sed alias asperiores perspiciatis deserunt omnis. Mollitia unde id in." - }, - "author": { - "pact:matcher:type": "type", - "value": "Melisa Kassulke" - }, - "publicationDate": { - "pact:matcher:type": "regex", - "regex": "^\\\\d{4}-[01]\\\\d-[0-3]\\\\dT[0-2]\\\\d:[0-5]\\\\d:[0-5]\\\\d([+-][0-2]\\\\d:[0-5]\\\\d|Z)$", - "value": "1999-02-13T00:00:00+07:00" - }, - "reviews": [ - - ] - }'); - -$port = $ffi->pactffi_create_mock_server_for_pact($pact, '127.0.0.1:0', false); -echo sprintf("Mock server port=%d\n", $port); - -$client = HttpClient::create(); - -$json = getenv('MATCHING') ? 
[ - 'isbn' => '0099740915', - 'title' => "The Handmaid's Tale", - 'description' => 'Brilliantly conceived and executed, this powerful evocation of twenty-first century America gives full rein to Margaret Atwood\'s devastating irony, wit and astute perception.', - 'author' => 'Margaret Atwood', - 'publicationDate' => '1985-07-31T00:00:00+00:00' -] : [ - 'isbn' => '0099740915', - 'title' => 123, - 'description' => 'Natus ut doloribus magni. Impedit aperiam ea similique. Sed architecto quod nulla maxime. Quibusdam inventore esse harum accusantium rerum nulla voluptatem.', - 'author' => 'Maryse Kulas', - 'publicationDate' => 'tommorow' -]; - -$response = $client->request( - 'POST', - sprintf('http://localhost:%d/api/books', $port), - [ - 'json' => $json, - ] -); - -echo sprintf("STATUS: %d\n", $response->getStatusCode()); -echo sprintf("HEADERS: %s\n", print_r($response->getHeaders(false), true)); -echo sprintf("BODY: %s\n", print_r(json_decode($response->getContent(false), true), true)); - -if ($ffi->pactffi_mock_server_matched($port)) { - echo getenv('MATCHING') ? "Mock server matched all requests, Yay!" : "Mock server matched all requests, That Is Not Good (tm)"; - echo "\n"; - - $ffi->pactffi_write_pact_file($port, __DIR__ . '/../pacts', false); -} else { - echo getenv('MATCHING') ? "We got some mismatches, Boo!" : "We got some mismatches, as expected."; - echo "\n"; - echo sprintf("Mismatches: %s\n", print_r(json_decode(FFI::string($ffi->pactffi_mock_server_mismatches($port)), true), true)); -} - -$ffi->pactffi_cleanup_mock_server($port); diff --git a/php/src/consumer-2.php b/php/src/consumer-2.php deleted file mode 100644 index 51f1386e8..000000000 --- a/php/src/consumer-2.php +++ /dev/null @@ -1,86 +0,0 @@ - -// $code = file_get_contents(posix_getpwnam(get_current_user())['dir'] . '/.pact/ffi/pact.h'); -// $ffi = FFI::cdef($code, posix_getpwnam(get_current_user())['dir'] . 
'/.pact/ffi/osxaarch64/libpact_ffi.dylib'); - -$ffi->pactffi_init('LOG_LEVEL'); - -$pact = $ffi->pactffi_new_pact('http-consumer-2', 'http-provider'); -$ffi->pactffi_with_specification($pact, $ffi->PactSpecification_V3); - -$interaction = $ffi->pactffi_new_interaction($pact, 'A PUT request to generate book cover'); -$ffi->pactffi_upon_receiving($interaction, 'A PUT request to generate book cover'); -$ffi->pactffi_given($interaction, 'A book with id fb5a885f-f7e8-4a50-950f-c1a64a94d500 is required'); -$ffi->pactffi_with_request($interaction, 'PUT', '/api/books/fb5a885f-f7e8-4a50-950f-c1a64a94d500/generate-cover'); -$ffi->pactffi_with_header($interaction, $ffi->InteractionPart_Request, 'Content-Type', 0, 'application/json'); -$ffi->pactffi_with_body($interaction, $ffi->InteractionPart_Request, 'application/json', '[]'); -$ffi->pactffi_response_status($interaction, 204); - -$contents = '{ - "uuid": { - "pact:matcher:type": "regex", - "regex": "^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$", - "value": "fb5a885f-f7e8-4a50-950f-c1a64a94d500" - } -}'; -$length = \strlen($contents); -$size = $length + 1; -$cData = $ffi->new("uint8_t[{$size}]"); -FFI::memcpy($cData, $contents, $length); - -$messagePact = $ffi->pactffi_new_message_pact('message-consumer-2', 'message-provider'); -$message = $ffi->pactffi_new_message($messagePact, 'Book (id fb5a885f-f7e8-4a50-950f-c1a64a94d500) created message'); -$ffi->pactffi_message_expects_to_receive($message, 'Book (id fb5a885f-f7e8-4a50-950f-c1a64a94d500) created message'); -$ffi->pactffi_message_given($message, 'A book with id fb5a885f-f7e8-4a50-950f-c1a64a94d500 is required'); -$ffi->pactffi_message_with_contents($message, 'application/json', $cData, $size); - -$port = $ffi->pactffi_create_mock_server_for_pact($pact, '127.0.0.1:0', false); -echo sprintf("Mock server port=%d\n", $port); - -$messageHandler = function ($message) use ($port) { - if (!isset($message->uuid)) { - return; - } - - $client = HttpClient::create(); - - $response = $client->request( - 'PUT', - sprintf('http://localhost:%d/api/books/%s/generate-cover', $port, $message->uuid), - [ - 'json' => getenv('MATCHING') ? [] : [ - 'width' => '720', - 'height' => '1080' - ], - ] - ); - - echo sprintf("STATUS: %d\n", $response->getStatusCode()); - echo sprintf("HEADERS: %s\n", print_r($response->getHeaders(false), true)); - echo sprintf("BODY: %s\n", print_r(json_decode($response->getContent(false), true), true)); -}; - -$reified = $ffi->pactffi_message_reify($message); -$raw = json_decode($reified, false); -$messageHandler($raw->contents); - -if ($ffi->pactffi_mock_server_matched($port)) { - echo getenv('MATCHING') ? "Mock server matched all requests, Yay!" : "Mock server matched all requests, That Is Not Good (tm)"; - echo "\n"; - - $ffi->pactffi_write_pact_file($port, __DIR__ . '/../pacts', false); - $ffi->pactffi_write_message_pact_file($messagePact, __DIR__ . '/../pacts', false); -} else { - echo getenv('MATCHING') ? "We got some mismatches, Boo!" : "We got some mismatches, as expected."; - echo "\n"; - echo sprintf("Mismatches: %s\n", print_r(json_decode(FFI::string($ffi->pactffi_mock_server_mismatches($port)), true), true)); -} - -$ffi->pactffi_cleanup_mock_server($port); diff --git a/php/src/consumer-plugin.php b/php/src/consumer-plugin.php deleted file mode 100644 index 344a5100c..000000000 --- a/php/src/consumer-plugin.php +++ /dev/null @@ -1,77 +0,0 @@ - -// $code = file_get_contents(posix_getpwnam(get_current_user())['dir'] . 
'/.pact/ffi/pact.h'); -// $ffi = FFI::cdef($code, posix_getpwnam(get_current_user())['dir'] . '/.pact/ffi/osxaarch64/libpact_ffi.dylib'); - -// Setup Loggers -$ffi->pactffi_logger_init(); -$ffi->pactffi_logger_attach_sink('file ./logs/log-info.txt',5); -$ffi->pactffi_logger_attach_sink('file ./logs/log-error.txt',5); -$ffi->pactffi_logger_attach_sink('stdout', 3); -$ffi->pactffi_logger_attach_sink('stderr', 2); -$ffi->pactffi_logger_apply(); -$ffi->pactffi_log_message('pact-php-ffi', 'INFO', 'hello from pact php ffi, using Pact FFI Version: ' . $ffi->pactffi_version()); - -// Setup pact for testing -$pact = $ffi->pactffi_new_pact('grpc-consumer-php', 'area-calculator-provider'); -$ffi->pactffi_with_pact_metadata($pact, 'pact-php','ffi',$ffi->pactffi_version()); -$message_pact = $ffi->pactffi_new_sync_message_interaction($pact, 'A gRPC calculateOne request'); -$ffi->pactffi_with_specification($pact, $ffi->PactSpecification_V4); - -// Setup contents - -$proto_file_path = __DIR__ . '/../../proto/area_calculator.proto'; - -$contents = '{ - "pact:proto": "'. $proto_file_path . '", - "pact:proto-service": "Calculator/calculateOne", - "pact:content-type": "application/protobuf", - "request": { - "rectangle": { - "length": "matching(number, 3)", - "width": "matching(number, 4)" - } - }, - "response": { - "value": ["matching(number, 12)"] - } - }'; - -// Start mock server - -$ffi->pactffi_using_plugin($pact, 'protobuf', '0.1.17'); -$ffi->pactffi_interaction_contents($message_pact, 0, 'application/grpc', $contents); -$port = $ffi->pactffi_create_mock_server_for_transport($pact , '0.0.0.0',0,'grpc', null); - -echo sprintf("Mock server port=%d\n", $port); - -// This is where we would call our client, gRPC in this example plugin demo -// PHP gRPC is client only, so would need to use a provider from the following -// https://github.com/pact-foundation/pact-plugins/tree/main/examples/gRPC/area_calculator -// TODO build out an area calculator PHP example -// https://grpc.io/docs/languages/php/ - -// Check if requests match - Note this _should_ fail, but the pactffi_mock_server_matched is returning true -// even if no requests were made. - -if ($ffi->pactffi_mock_server_matched($port)) { - echo getenv('MATCHING') ? "Mock server matched all requests, Yay!" : "Mock server matched all requests, That Is Not Good (tm)"; - echo "\n"; - - $ffi->pactffi_write_pact_file($port, __DIR__ . '/../pacts', false); - // $ffi->pactffi_write_message_pact_file($messagePact, __DIR__ . '/../pacts', false); -} else { - echo getenv('MATCHING') ? "We got some mismatches, Boo!" : "We got some mismatches, as expected."; - echo "\n"; - echo sprintf("Mismatches: %s\n", print_r(json_decode(FFI::string($ffi->pactffi_mock_server_mismatches($port)), true), true)); -} - -$ffi->pactffi_cleanup_mock_server($port); -$ffi->pactffi_cleanup_plugins($pact); diff --git a/php/src/provider.php b/php/src/provider.php deleted file mode 100644 index 16a489330..000000000 --- a/php/src/provider.php +++ /dev/null @@ -1,113 +0,0 @@ -start(); -$process->waitUntil(function ($type, $output) { - return false !== strpos($output, 'Development Server (http://localhost:8000) started'); -}); - -$code = file_get_contents(__DIR__ . '/../../rust/pact_ffi/include/pact.h'); -$ffi = FFI::cdef($code, __DIR__ . '/../../rust/target/debug/libpact_ffi.dylib'); -// Macs use dylib extension, following will assume os's downloaded in users home dir ~/.pact/ffi/arch/libpact_ffi. -// $code = file_get_contents(posix_getpwnam(get_current_user())['dir'] . 
'/.pact/ffi/pact.h'); -// $ffi = FFI::cdef($code, posix_getpwnam(get_current_user())['dir'] . '/.pact/ffi/osxaarch64/libpact_ffi.dylib'); - -$ffi->pactffi_init('LOG_LEVEL'); - -$tags = ['feature-x', 'master', 'test', 'prod']; -$consumers = ['http-consumer-1', 'http-consumer-2', 'message-consumer-2','area-calculator-consumer']; - -function getCData(array $items): FFI\CData -{ - $itemsSize = count($items); - $cDataItems = FFI::new("char*[{$itemsSize}]"); - foreach ($items as $index => $item) { - $length = \strlen($item); - $itemSize = $length + 1; - $cDataItem = FFI::new("char[{$itemSize}]", false); - FFI::memcpy($cDataItem, $item, $length); - $cDataItems[$index] = $cDataItem; - } - - return $cDataItems; -} - -$handle = $ffi->pactffi_verifier_new(); - -// // gRPC ❌ -// // HTTP ✅ -// // Verification failed with an error - Failed to verify the request: gRPC error: status Unknown error, message 'transport error' -// $ffi->pactffi_verifier_set_provider_info($handle, 'http-provider', 'http', 'localhost', 8000, '/'); - - -// // gRPC ✅ -// // HTTP ❌ -// // Request Failed - builder error for url (tcp://localhost:37757): URL scheme is not allowed -// $ffi->pactffi_verifier_set_provider_info($handle, 'http-provider', 'tcp', 'localhost', 37757, '/'); - -// // // gRPC ✅ -// // // HTTP ❌ -// // // Request Failed - builder error for url (tcp://localhost:37757): URL scheme is not allowed -// $ffi->pactffi_verifier_set_provider_info($handle, 'http-provider', 'tcp', 'localhost', 37757, '/'); -// $ffi->pactffi_verifier_add_provider_transport($handle, 'http',8000,'/','http'); - -// // gRPC ✅ -// // HTTP ❌ -// // Verification failed with an error - Failed to verify the request: gRPC error: status Unknown error, message 'transport error' -$ffi->pactffi_verifier_set_provider_info($handle, 'http-provider', 'http', 'localhost', 8000, '/'); -$ffi->pactffi_verifier_add_provider_transport($handle, 'protobuf',37757,'/','tcp'); - - -// // gRPC ✅ -// // HTTP ❌ -// // Request Failed - builder error for url (tcp://localhost:37757): URL scheme is not allowed -// $ffi->pactffi_verifier_set_provider_info($handle, 'http-provider', 'tcp', 'localhost', 37757, '/'); -// $ffi->pactffi_verifier_add_provider_transport($handle, 'http',8000,'/','http'); - - -// // 💡 -// // This would be my preferrred option -// // Set the provider name (which should be used by anything using the verifier_handle, and filter sourced pacts that don't contain name) -// // add multiple transports. -// // note pactffi_verifier_set_provider_name does not exist -// // update_provider_info might work -// // https://github.com/pact-foundation/pact-reference/blob/cfb2c03f87b3f67464291dd936d0aac555c42c91/rust/pact_ffi/src/verifier/handle.rs#L89 -// // but is marked as deprecated. -// // -// // also worthy of note, if pactffi_verifier_set_provider_info didn't mix with the information used in pactffi_verifier_add_provider_transport -// // this probably wouldn't be neccessary. 
-// $ffi->pactffi_verifier_set_provider_name($handle, 'http-provider'); // note this function doesn't exist (wishlist) -// $ffi->pactffi_verifier_add_provider_transport($handle, 'http',8000,'/','http'); -// $ffi->pactffi_verifier_add_provider_transport($handle, 'protobuf',37757,'/','tcp'); - - - // gRPC ❌ - // HTTP ❌ - // You can't just pass nulls into set_provider_info as it provides default info - // https://github.com/pact-foundation/pact-reference/blob/master/rust/pact_ffi/src/verifier/mod.rs#L143 -$ffi->pactffi_verifier_set_provider_info($handle, 'http-provider', null, null, null, null); -$ffi->pactffi_verifier_add_provider_transport($handle, 'protobuf',37757,'/','tcp'); -// $ffi->pactffi_verifier_add_provider_transport($handle, 'http',8000,'/','http'); // registering a http transport doesnt work either - - - - -// $ffi->pactffi_verifier_set_filter_info($handle, '', 'book', false); -$ffi->pactffi_verifier_set_provider_state($handle, 'http://localhost:8000/change-state', true, true); -$ffi->pactffi_verifier_set_verification_options($handle, false, 5000); -$ffi->pactffi_verifier_set_publish_options($handle, '1.0.0', null, getCData($tags), count($tags), 'some-branch'); -$ffi->pactffi_verifier_set_consumer_filters($handle, getCData($consumers), count($consumers)); -// $ffi->pactffi_verifier_add_provider_transport($handle, 'protobuf',37757,null,'tcp'); -$ffi->pactffi_verifier_add_directory_source($handle, __DIR__ . '/../pacts'); -$result = $ffi->pactffi_verifier_execute($handle); -$ffi->pactffi_verifier_shutdown($handle); - -if (!$result) { - echo "Verifier verified all contracts, Yay!\n"; -} else { - echo "We got some problems, Boo!\n"; -} diff --git a/proto/area_calculator.proto b/proto/area_calculator.proto deleted file mode 100644 index a622379e9..000000000 --- a/proto/area_calculator.proto +++ /dev/null @@ -1,53 +0,0 @@ -syntax = "proto3"; - -package area_calculator; - -option php_generic_services = true; -option go_package = "io.pact/area_calculator"; - -service Calculator { - rpc calculateOne (ShapeMessage) returns (AreaResponse) {} - rpc calculateMulti (AreaRequest) returns (AreaResponse) {} -} - -message ShapeMessage { - oneof shape { - Square square = 1; - Rectangle rectangle = 2; - Circle circle = 3; - Triangle triangle = 4; - Parallelogram parallelogram = 5; - } -} - -message Square { - float edge_length = 1; -} - -message Rectangle { - float length = 1; - float width = 2; -} - -message Circle { - float radius = 1; -} - -message Triangle { - float edge_a = 1; - float edge_b = 2; - float edge_c = 3; -} - -message Parallelogram { - float base_length = 1; - float height = 2; -} - -message AreaRequest { - repeated ShapeMessage shapes = 1; -} - -message AreaResponse { - repeated float value = 1; -} \ No newline at end of file diff --git a/python/.gitignore b/python/.gitignore deleted file mode 100644 index ed8ebf583..000000000 --- a/python/.gitignore +++ /dev/null @@ -1 +0,0 @@ -__pycache__ \ No newline at end of file diff --git a/python/Makefile b/python/Makefile deleted file mode 100644 index 3b5c990a5..000000000 --- a/python/Makefile +++ /dev/null @@ -1,33 +0,0 @@ -build_ffi: - cd ../rust/pact_ffi && \ - cargo build && \ - rustup run nightly cbindgen \ - --crate pact_ffi \ - --output include/pact.h - -deps: - pip install -r requires.txt - -run_hello_ffi: - python hello_ffi.py - -run_pact_http: - python pact_http_create_mock_server.py - -run_pact_http_create_mock_server_for_pact: - python pact_http_create_mock_server_for_pact.py - -run_pact_message_v3: - python 
pact_message_v3.py - -run_pact_plugin_grpc_v4: - python pact_plugin_grpc_v4.py - -test: deps run_pact_http run_pact_http_create_mock_server_for_pact run_pact_message_v3 run_pact_plugin_grpc_v4 - -pacts_show: - cat "pacts/Consumer-Alice Service.json" | jq . - cat "pacts/http-consumer-1-http-provider.json" | jq . - cat "pacts/http-consumer-2-http-provider.json" | jq . - cat "pacts/message-consumer-2-message-provider.json" | jq . - cat "pacts/grpc-consumer-python-area-calculator-provider.json" | jq . \ No newline at end of file diff --git a/python/hello_ffi.py b/python/hello_ffi.py deleted file mode 100644 index aef2188dd..000000000 --- a/python/hello_ffi.py +++ /dev/null @@ -1,7 +0,0 @@ -from cffi import FFI -from register_ffi import get_ffi_lib - -ffi = FFI() -lib = get_ffi_lib(ffi) # loads the entire C namespace -result = lib.pactffi_version() -print(ffi.string(result).decode('utf-8')) \ No newline at end of file diff --git a/python/pact_http_create_mock_server.py b/python/pact_http_create_mock_server.py deleted file mode 100644 index 59aea10b2..000000000 --- a/python/pact_http_create_mock_server.py +++ /dev/null @@ -1,94 +0,0 @@ -from cffi import FFI -from register_ffi import get_ffi_lib -import json -import requests - -ffi = FFI() -lib = get_ffi_lib(ffi) # loads the entire C namespace -version_encoded = lib.pactffi_version() -ffi_version = ffi.string(version_encoded).decode('utf-8') - -contents ={ - "provider": { - "name": "Alice Service" - }, - "consumer": { - "name": "Consumer" - }, - "interactions": [ - { - "description": "a retrieve Mallory request", - "request": { - "method": "GET", - "path": "/mallory", - "query": "name=ron&status=good" - }, - "response": { - "status": 200, - "headers": { - "Content-Type": "text/html" - }, - "body": "That is some good Mallory." - } - } - ], - "metadata": { - "pact-specification": { - "version": "1.0.0" - }, - "pact-python": { - "version": "1.0.0", - "ffi": ffi_version - } - } - } - -print(contents) - -## Setup Loggers - -lib.pactffi_logger_init() -lib.pactffi_logger_attach_sink(b'file ./logs/log-info.txt',5) -lib.pactffi_logger_attach_sink(b'file ./logs/log-error.txt',5) -# lib.pactffi_logger_attach_sink(b'stdout', 5) -# lib.pactffi_logger_attach_sink(b'stderr', 5) -lib.pactffi_logger_apply() -lib.pactffi_log_message(b'pact_python_ffi', b'INFO', b'hello from pact python ffi, using Pact FFI Version: '+ ffi.string(version_encoded)) - - -## Load pact into Mock Server and start -mock_server_port = lib.pactffi_create_mock_server(ffi.new("char[]", json.dumps(contents).encode('ascii')) , b'127.0.0.1:4432',0) -print(f"Mock server started: {mock_server_port}") - -## Make our client call - -expected_response = 'That is some good Mallory.' 
-try: - response = requests.get(f"http://127.0.0.1:{mock_server_port}/mallory?name=ron&status=good") - print(f"Client response - matched: {response.text}") - print(f"Client response - matched: {response.text == expected_response}") - response.raise_for_status() -except requests.HTTPError as http_err: - print(f'Client request - HTTP error occurred: {http_err}') # Python 3.6 -except Exception as err: - print(f'Client request - Other error occurred: {err}') # Python 3.6 - -result = lib.pactffi_mock_server_matched(mock_server_port) -print(f"Pact - Got matching client requests: {result}") -if result == True: - PACT_FILE_DIR='./pacts' - print(f"Writing pact file to {PACT_FILE_DIR}") - res_write_pact = lib.pactffi_write_pact_file(mock_server_port, PACT_FILE_DIR.encode('ascii'), False) - print(f"Pact file writing results: {res_write_pact}") -else: - print('pactffi_mock_server_matched did not match') - mismatchers = lib.pactffi_mock_server_mismatches(mock_server_port) - result = json.loads(ffi.string(mismatchers)) - print(json.dumps(result, indent=4)) - logs = lib.pactffi_mock_server_logs(mock_server_port) - print(logs) - -## Cleanup - -lib.pactffi_cleanup_mock_server(mock_server_port) - diff --git a/python/pact_http_create_mock_server_for_pact.py b/python/pact_http_create_mock_server_for_pact.py deleted file mode 100644 index a337f0ccc..000000000 --- a/python/pact_http_create_mock_server_for_pact.py +++ /dev/null @@ -1,132 +0,0 @@ -from cffi import FFI -from register_ffi import get_ffi_lib -import json -import requests - -ffi = FFI() -lib = get_ffi_lib(ffi) # loads the entire C namespace -version_encoded = lib.pactffi_version() -ffi_version = ffi.string(version_encoded).decode('utf-8') - -request_interaction_body = { - "isbn": { - "pact:matcher:type": "type", - "value": "0099740915" - }, - "title": { - "pact:matcher:type": "type", - "value": "The Handmaid\'s Tale" - }, - "description": { - "pact:matcher:type": "type", - "value": "Brilliantly conceived and executed, this powerful evocation of twenty-first century America gives full rein to Margaret Atwood\'s devastating irony, wit and astute perception." - }, - "author": { - "pact:matcher:type": "type", - "value": "Margaret Atwood" - }, - "publicationDate": { - "pact:matcher:type": "regex", - "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z)$", - "value": "1985-07-31T00:00:00+00:00" - } - } - -# print(request_interaction_body) - -response_interaction_body = { - "@context": "/api/contexts/Book", - "@id": { - "pact:matcher:type": "regex", - "regex": "^\\/api\\/books\\/[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$", - "value": "/api/books/0114b2a8-3347-49d8-ad99-0e792c5a30e6" - }, - "@type": "Book", - "title": { - "pact:matcher:type": "type", - "value": "Voluptas et tempora repellat corporis excepturi." - }, - "description": { - "pact:matcher:type": "type", - "value": "Quaerat odit quia nisi accusantium natus voluptatem. Explicabo corporis eligendi ut ut sapiente ut qui quidem. Optio amet velit aut delectus. Sed alias asperiores perspiciatis deserunt omnis. Mollitia unde id in." 
- }, - "author": { - "pact:matcher:type": "type", - "value": "Melisa Kassulke" - }, - "publicationDate": { - "pact:matcher:type": "regex", - "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z)$", - "value": "1999-02-13T00:00:00+07:00" - }, - "reviews": [ - ] - } - -print(response_interaction_body) - -## Setup Loggers - -lib.pactffi_logger_init() -lib.pactffi_logger_attach_sink(b'file ./logs/log-info.txt',5) -lib.pactffi_logger_attach_sink(b'file ./logs/log-error.txt',5) -# lib.pactffi_logger_attach_sink(b'stdout', 5) -# lib.pactffi_logger_attach_sink(b'stderr', 5) -lib.pactffi_logger_apply() -lib.pactffi_log_message(b'pact_python_ffi', b'INFO', b'hello from pact python ffi, using Pact FFI Version: '+ ffi.string(version_encoded)) - - -## Setup pact for testing -pact = lib.pactffi_new_pact(b'http-consumer-1', b'http-provider') -lib.pactffi_with_pact_metadata(pact, b'pact-python', b'ffi', ffi.string(version_encoded)) -interaction = lib.pactffi_new_interaction(pact, b'A POST request to create book') -# setup interaction request -lib.pactffi_upon_receiving(interaction, b'A POST request to create book') -lib.pactffi_given(interaction, b'No book fixtures required') -lib.pactffi_with_request(interaction, b'POST', b'/api/books') -lib.pactffi_with_header_v2(interaction, 0,b'Content-Type', 0, b'application/json') -lib.pactffi_with_body(interaction, 0,b'application/json', ffi.new("char[]", json.dumps(request_interaction_body).encode('ascii'))) -# setup interaction response -lib.pactffi_response_status(interaction, 200) -lib.pactffi_with_header_v2(interaction, 1,b'Content-Type', 0, b'application/ld+json; charset=utf-8') -lib.pactffi_with_body(interaction, 1,b'application/ld+json; charset=utf-8', ffi.new("char[]", json.dumps(response_interaction_body).encode('ascii'))) - -# Start mock server -mock_server_port = lib.pactffi_create_mock_server_for_pact(pact , b'0.0.0.0:0',0) -print(f"Mock server started: {mock_server_port}") - -## Make our client call -body = { - "isbn": '0099740915', - "title": "The Handmaid's Tale", - "description": 'Brilliantly conceived and executed, this powerful evocation of twenty-first century America gives full rein to Margaret Atwood\'s devastating irony, wit and astute perception.', - "author": 'Margaret Atwood', - "publicationDate": '1985-07-31T00:00:00+00:00' - } -expected_response = '{"@context":"/api/contexts/Book","@id":"/api/books/0114b2a8-3347-49d8-ad99-0e792c5a30e6","@type":"Book","author":"Melisa Kassulke","description":"Quaerat odit quia nisi accusantium natus voluptatem. Explicabo corporis eligendi ut ut sapiente ut qui quidem. Optio amet velit aut delectus. Sed alias asperiores perspiciatis deserunt omnis. 
Mollitia unde id in.","publicationDate":"1999-02-13T00:00:00+07:00","reviews":[],"title":"Voluptas et tempora repellat corporis excepturi."}' -try: - response = requests.post(f"http://127.0.0.1:{mock_server_port}/api/books", data=json.dumps(body), - headers={'Content-Type': 'application/json'}) - print(f"Client response - matched: {response.text}") - print(f"Client response - matched: {response.text == expected_response}") - response.raise_for_status() -except requests.HTTPError as http_err: - print(f'Client request - HTTP error occurred: {http_err}') # Python 3.6 -except Exception as err: - print(f'Client request - Other error occurred: {err}') # Python 3.6 - -result = lib.pactffi_mock_server_matched(mock_server_port) -print(f"Pact - Got matching client requests: {result}") -if result == True: - PACT_FILE_DIR='./pacts' - print(f"Writing pact file to {PACT_FILE_DIR}") - res_write_pact = lib.pactffi_write_pact_file(mock_server_port, PACT_FILE_DIR.encode('ascii'), False) - print(f"Pact file writing results: {res_write_pact}") -else: - print('pactffi_mock_server_matched did not match') - mismatchers = lib.pactffi_mock_server_mismatches(mock_server_port) - result = json.loads(ffi.string(mismatchers)) - print(json.dumps(result, indent=4)) - -## Cleanup -lib.pactffi_cleanup_mock_server(mock_server_port) diff --git a/python/pact_message_v3.py b/python/pact_message_v3.py deleted file mode 100644 index ca5bb9ced..000000000 --- a/python/pact_message_v3.py +++ /dev/null @@ -1,85 +0,0 @@ -from cffi import FFI -from register_ffi import get_ffi_lib -import json -import requests - -ffi = FFI() -lib = get_ffi_lib(ffi) # loads the entire C namespace -version_encoded = lib.pactffi_version() -ffi_version = ffi.string(version_encoded).decode('utf-8') - -contents = { - "uuid": { - "pact:matcher:type": 'regex', - "regex": '^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$', - "value": 'fb5a885f-f7e8-4a50-950f-c1a64a94d500' - } - } -## Setup Loggers - -lib.pactffi_logger_init() -lib.pactffi_logger_attach_sink(b'file ./logs/log-info.txt',5) -lib.pactffi_logger_attach_sink(b'file ./logs/log-error.txt',5) -# lib.pactffi_logger_attach_sink(b'stdout', 5) -# lib.pactffi_logger_attach_sink(b'stderr', 5) -lib.pactffi_logger_apply() -lib.pactffi_log_message(b'pact_python_ffi', b'INFO', b'hello from pact python ffi, using Pact FFI Version: '+ ffi.string(version_encoded)) - - -## Setup pact for testing -pact = lib.pactffi_new_pact(b'http-consumer-2', b'http-provider') -lib.pactffi_with_pact_metadata(pact, b'pact-python', b'ffi', ffi.string(version_encoded)) -interaction = lib.pactffi_new_interaction(pact, b'A PUT request to generate book cover') -message_pact = lib.pactffi_new_pact(b'message-consumer-2', b'message-provider') -message = lib.pactffi_new_message(message_pact, b'Book (id fb5a885f-f7e8-4a50-950f-c1a64a94d500) created message') - -# setup interaction request -lib.pactffi_upon_receiving(interaction, b'A PUT request to generate book cover') -lib.pactffi_given(interaction, b'A book with id fb5a885f-f7e8-4a50-950f-c1a64a94d500 is required') -lib.pactffi_with_request(interaction, b'PUT', b'/api/books/fb5a885f-f7e8-4a50-950f-c1a64a94d500/generate-cover') -lib.pactffi_with_header_v2(interaction, 0,b'Content-Type', 0, b'application/json') -lib.pactffi_with_body(interaction, 0,b'application/json', b'[]') -# setup interaction response -lib.pactffi_response_status(interaction, 204) -length = len(json.dumps(contents)) -size = length + 1 -# memBuf = FFI::MemoryPointer.new(:uint, length) -# memBuf.put_bytes(0, json.dump(contents)) 
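Every script in this directory ends with the same verify/write/cleanup tail: check pactffi_mock_server_matched, write the pact file only on success, otherwise dump pactffi_mock_server_mismatches, and always call pactffi_cleanup_mock_server. A sketch of that pattern as a helper; the finish name is illustrative, and ffi/lib are assumed to come from register_ffi.get_ffi_lib as above:

    from cffi import FFI
    from register_ffi import get_ffi_lib
    import json

    ffi = FFI()
    lib = get_ffi_lib(ffi)

    def finish(mock_server_port, pact_dir='./pacts'):
        # Mirrors the check/write/cleanup tail of the example scripts above
        matched = lib.pactffi_mock_server_matched(mock_server_port)
        if matched:
            # Only write the pact file when every expected request was seen
            lib.pactffi_write_pact_file(mock_server_port, pact_dir.encode('ascii'), False)
        else:
            mismatches = ffi.string(lib.pactffi_mock_server_mismatches(mock_server_port))
            print(json.dumps(json.loads(mismatches), indent=4))
        # Always release the port and the mock server state
        lib.pactffi_cleanup_mock_server(mock_server_port)
        return matched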
-lib.pactffi_message_expects_to_receive(message,b'Book (id fb5a885f-f7e8-4a50-950f-c1a64a94d500) created message') -lib.pactffi_message_given(message, b'A book with id fb5a885f-f7e8-4a50-950f-c1a64a94d500 is required') -lib.pactffi_message_with_contents(message, b'application/json', ffi.new("char[]", json.dumps(contents).encode('ascii')), size) -# Start mock server -mock_server_port = lib.pactffi_create_mock_server_for_pact(pact , b'0.0.0.0:0',0) -print(f"Mock server started: {mock_server_port}") -reified = lib.pactffi_message_reify(message) -uuid = json.loads(ffi.string(reified).decode('utf-8'))['contents']['uuid'] -## Make our client call -body = [] -try: - response = requests.put(f"http://127.0.0.1:{mock_server_port}/api/books/{uuid}/generate-cover", data=json.dumps(body), - headers={'Content-Type': 'application/json'}) - print(f"Client response - matched: {response.text}") - print(f"Client response - matched: {response.status_code}") - print(f"Client response - matched: {response.status_code == '204'}") - response.raise_for_status() -except requests.HTTPError as http_err: - print(f'Client request - HTTP error occurred: {http_err}') # Python 3.6 -except Exception as err: - print(f'Client request - Other error occurred: {err}') # Python 3.6 - -result = lib.pactffi_mock_server_matched(mock_server_port) -print(f"Pact - Got matching client requests: {result}") -if result == True: - PACT_FILE_DIR='./pacts' - print(f"Writing pact file to {PACT_FILE_DIR}") - res_write_pact = lib.pactffi_write_pact_file(mock_server_port, PACT_FILE_DIR.encode('ascii'), False) - res_write_message_pact = lib.pactffi_write_message_pact_file(message_pact, PACT_FILE_DIR.encode('ascii'), False) - print(f"Pact file writing results: {res_write_pact}") -else: - print('pactffi_mock_server_matched did not match') - mismatchers = lib.pactffi_mock_server_mismatches(mock_server_port) - result = json.loads(ffi.string(mismatchers)) - print(json.dumps(result, indent=4)) - -## Cleanup -lib.pactffi_cleanup_mock_server(mock_server_port) diff --git a/python/pact_plugin_grpc_v4.py b/python/pact_plugin_grpc_v4.py deleted file mode 100644 index 409b52de7..000000000 --- a/python/pact_plugin_grpc_v4.py +++ /dev/null @@ -1,78 +0,0 @@ -import sys -from cffi import FFI -from register_ffi import get_ffi_lib -import json -import os -from pathlib import Path -# sys.path.insert(0, './examples/area_calculator') -# from area_calculator_client import get_rectangle_area - -ffi = FFI() -lib = get_ffi_lib(ffi) # loads the entire C namespace -version_encoded = lib.pactffi_version() -ffi_version = ffi.string(version_encoded).decode('utf-8') - -contents = { - "pact:proto": os.path.abspath('../proto/area_calculator.proto'), - "pact:proto-service": 'Calculator/calculateOne', - "pact:content-type": 'application/protobuf', - "request": { - "rectangle": { - "length": 'matching(number, 3)', - "width": 'matching(number, 4)' - } - }, - "response": { - "value": ['matching(number, 12)'] - } - } - -## Setup Loggers - -lib.pactffi_logger_init() -lib.pactffi_logger_attach_sink(b'file ./logs/log-info.txt',5) -lib.pactffi_logger_attach_sink(b'file ./logs/log-error.txt',5) -lib.pactffi_logger_attach_sink(b'stdout', 3) -# lib.pactffi_logger_attach_sink(b'stderr', 5) -lib.pactffi_logger_apply() -lib.pactffi_log_message(b'pact_python_ffi', b'INFO', b'hello from pact python ffi, using Pact FFI Version: '+ ffi.string(version_encoded)) - - -## Setup pact for testing -pact = lib.pactffi_new_pact(b'grpc-consumer-python', b'area-calculator-provider') 
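For the message pact above, pactffi_message_reify is what resolves the matcher-annotated contents into a concrete example payload the test can reuse (in the script above, the generated uuid drives the HTTP path). A minimal message-pact sketch under the same register_ffi assumptions; the pacticipant and description names are placeholders:

    from cffi import FFI
    from register_ffi import get_ffi_lib
    import json

    ffi = FFI()
    lib = get_ffi_lib(ffi)

    message_pact = lib.pactffi_new_pact(b'example-message-consumer', b'example-message-provider')
    message = lib.pactffi_new_message(message_pact, b'an example event')

    contents = {"uuid": {"pact:matcher:type": "regex",
                         "regex": "^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$",
                         "value": "fb5a885f-f7e8-4a50-950f-c1a64a94d500"}}
    encoded = json.dumps(contents).encode('ascii')
    lib.pactffi_message_expects_to_receive(message, b'an example event')
    lib.pactffi_message_with_contents(message, b'application/json',
                                      ffi.new("char[]", encoded), len(encoded) + 1)

    # Reify turns the matchers into concrete example values
    reified = json.loads(ffi.string(lib.pactffi_message_reify(message)).decode('utf-8'))
    uuid = reified['contents']['uuid']

    lib.pactffi_write_message_pact_file(message_pact, b'./pacts', False)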
-lib.pactffi_with_pact_metadata(pact, b'pact-python', b'ffi', ffi.string(version_encoded)) -message_pact = lib.pactffi_new_sync_message_interaction(pact, b'A gRPC calculateMulti request') -lib.pactffi_with_specification(pact, 5) - - - -# Start mock server -lib.pactffi_using_plugin(pact, b'protobuf', b'0.1.17') -lib.pactffi_interaction_contents(message_pact, 0, b'application/grpc', ffi.new("char[]", json.dumps(contents).encode('ascii'))) -mock_server_port = lib.pactffi_create_mock_server_for_transport(pact , b'0.0.0.0',0,b'grpc',ffi.cast("void *", 0)) -print(f"Mock server started: {mock_server_port}") - -# ## Make our client call -# expected_response = 12.0 -# response = get_rectangle_area(f"localhost:{mock_server_port}") -# print(f"Client response: {response}") -# print(f"Client response - matched expected: {response == expected_response}") - -result = lib.pactffi_mock_server_matched(mock_server_port) -print(f"Pact - Got matching client requests: {result}") - -if result == True: - PACT_FILE_DIR='./pacts' - print(f"Writing pact file to {PACT_FILE_DIR}") - res_write_pact = lib.pactffi_write_pact_file(mock_server_port, PACT_FILE_DIR.encode('ascii'), False) - print(f"Pact file writing results: {res_write_pact}") -else: - print('pactffi_mock_server_matched did not match') - mismatchers = lib.pactffi_mock_server_mismatches(mock_server_port) - if mismatchers: - result = json.loads(ffi.string(mismatchers)) - print(json.dumps(result, indent=4)) - -# Cleanup -lib.pactffi_cleanup_mock_server(mock_server_port) -lib.pactffi_cleanup_plugins(pact) diff --git a/python/pact_xml.py b/python/pact_xml.py deleted file mode 100644 index 62a2ca1ef..000000000 --- a/python/pact_xml.py +++ /dev/null @@ -1,104 +0,0 @@ -import requests -import xml.etree.ElementTree as ET -from cffi import FFI -from register_ffi import get_ffi_lib -import json -import requests -ffi = FFI() - -lib = get_ffi_lib(ffi) # loads the entire C namespace -lib.pactffi_logger_init() -lib.pactffi_logger_attach_sink(b'stdout', 5) -lib.pactffi_logger_apply() -version_encoded = lib.pactffi_version() -lib.pactffi_log_message(b'pact_python_ffi', b'INFO', b'hello from pact python ffi, using Pact FFI Version: '+ ffi.string(version_encoded)) - -expected_response_body = ''' - - - 1 - - - 1 - Do the laundry - true - - - 2 - Do the dishes - false - - - 3 - Do the backyard - false - - - 4 - Do nothing - false - - - - ''' -format = 'xml' -content_type = 'application/' + format -pact_handle = lib.pactffi_new_pact(b'consumer',b'provider') -lib.pactffi_with_pact_metadata(pact_handle, b'pact-python', b'version', b'1.0.0') -interaction_handle = lib.pactffi_new_interaction(pact_handle, b'description') -lib.pactffi_given(interaction_handle, b'i have a list of projects') -lib.pactffi_upon_receiving(interaction_handle, b'a request for projects in XML') -lib.pactffi_with_request(interaction_handle, b'GET', b'/projects') -lib.pactffi_with_header_v2(interaction_handle, 0, b'Accept', 0, content_type.encode('ascii')) - -# lib.pactffi_with_header_v2(interaction_handle, 1, b'Content-Type', 0, content_type.encode('ascii')) -# lib.pactffi_with_header_v2(interaction_handle, 1, b'content-type', 1, content_type.encode('ascii')) -lib.pactffi_with_body(interaction_handle, 1, content_type.encode('ascii'), expected_response_body.encode('ascii')) - -mock_server_port = lib.pactffi_create_mock_server_for_transport(pact_handle, b'127.0.0.1', 0, b'http', b'{}') -print(f"Mock server started: {mock_server_port}") -try: - uri = f"http://127.0.0.1:{mock_server_port}/projects" - response = 
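The gRPC example above differs from the plain HTTP scripts in three ways: the pact must be a V4 pact (specification level 5), the interaction contents are handed to the protobuf plugin via pactffi_interaction_contents, and the mock server is started with pactffi_create_mock_server_for_transport using the grpc transport. A condensed sketch of that flow, mirroring the calls and values used above (the proto path must exist relative to the working directory; the description string is arbitrary):

    from cffi import FFI
    from register_ffi import get_ffi_lib
    import json
    import os

    ffi = FFI()
    lib = get_ffi_lib(ffi)

    pact = lib.pactffi_new_pact(b'grpc-consumer-python', b'area-calculator-provider')
    lib.pactffi_with_specification(pact, 5)  # level 5 corresponds to a V4 pact, as used above
    interaction = lib.pactffi_new_sync_message_interaction(pact, b'A gRPC calculateOne request')

    # The protobuf plugin interprets these contents; matching(...) is the plugin's matcher DSL
    contents = {
        "pact:proto": os.path.abspath('../proto/area_calculator.proto'),
        "pact:proto-service": 'Calculator/calculateOne',
        "pact:content-type": 'application/protobuf',
        "request": {"rectangle": {"length": 'matching(number, 3)', "width": 'matching(number, 4)'}},
        "response": {"value": ['matching(number, 12)']},
    }
    lib.pactffi_using_plugin(pact, b'protobuf', b'0.1.17')
    lib.pactffi_interaction_contents(interaction, 0, b'application/grpc',
                                     ffi.new("char[]", json.dumps(contents).encode('ascii')))

    # Start a gRPC transport instead of the plain HTTP mock server
    port = lib.pactffi_create_mock_server_for_transport(pact, b'0.0.0.0', 0, b'grpc',
                                                        ffi.cast("void *", 0))
    # ... drive a gRPC client against `port`, check pactffi_mock_server_matched, then:
    lib.pactffi_cleanup_mock_server(port)
    lib.pactffi_cleanup_plugins(pact)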
requests.get(uri, - headers={'Accept': content_type}) - response.raise_for_status() -except requests.HTTPError as http_err: - print(f'Client request - HTTP error occurred: {http_err}') # Python 3.6 -except Exception as err: - print(f'Client request - Other error occurred: {err}') # Python 3.6 - -# Check the client made the right request - -result = lib.pactffi_mock_server_matched(mock_server_port) -print(f"Pact - Got matching client requests: {result}") -if result == True: - PACT_FILE_DIR='./pacts' - print(f"Writing pact file to {PACT_FILE_DIR}") - res_write_pact = lib.pactffi_write_pact_file(mock_server_port, PACT_FILE_DIR.encode('ascii'), False) - print(f"Pact file writing results: {res_write_pact}") -else: - print('pactffi_mock_server_matched did not match') - mismatches = lib.pactffi_mock_server_mismatches(mock_server_port) - result = json.loads(ffi.string(mismatches)) - print(json.dumps(result, indent=4)) - native_logs = lib.pactffi_mock_server_logs(mock_server_port) - logs = ffi.string(native_logs).decode("utf-8").rstrip().split("\n") - print(logs) - -## Cleanup - -lib.pactffi_cleanup_mock_server(mock_server_port) -assert result == True -print(f"Client request - matched: {response.text}") -# Check our response came back from the provider ok - -assert response.text != '' # This should always have a response -projects = ET.fromstring(response.text) -assert len(projects) == 1 -assert projects[0][0].text == '1' -tasks = projects[0].findall('tasks')[0] -assert len(tasks) == 4 -assert tasks[0][0].text == '1' -# assert tasks[0][1].text == 'Do the laundry' -print(f"Client response - matched: {response.text}") -print(f"Client response - matched: {response.text == expected_response_body}") diff --git a/python/pacts/Consumer-Alice Service.json b/python/pacts/Consumer-Alice Service.json deleted file mode 100644 index a7bd26cfd..000000000 --- a/python/pacts/Consumer-Alice Service.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "consumer": { - "name": "Consumer" - }, - "interactions": [ - { - "description": "a retrieve Mallory request", - "request": { - "method": "GET", - "path": "/mallory", - "query": "name=ron&status=good" - }, - "response": { - "body": "That is some good Mallory.", - "headers": { - "Content-Type": "text/html" - }, - "status": 200 - } - } - ], - "metadata": { - "pact-python": { - "ffi": "0.3.15", - "version": "1.0.0" - }, - "pactRust": { - "mockserver": "0.9.5", - "models": "1.0.0" - }, - "pactSpecification": { - "version": "1.0.0" - } - }, - "provider": { - "name": "Alice Service" - } -} \ No newline at end of file diff --git a/python/pacts/grpc-consumer-python-area-calculator-provider.json b/python/pacts/grpc-consumer-python-area-calculator-provider.json deleted file mode 100644 index 68e55e4c2..000000000 --- a/python/pacts/grpc-consumer-python-area-calculator-provider.json +++ /dev/null @@ -1,102 +0,0 @@ -{ - "consumer": { - "name": "grpc-consumer-python" - }, - "interactions": [ - { - "description": "A gRPC calculateMulti request", - "interactionMarkup": { - "markup": "```protobuf\nmessage AreaResponse {\n repeated float value = 1;\n}\n```\n", - "markupType": "COMMON_MARK" - }, - "key": "103e10b578b91d6b", - "pending": false, - "pluginConfiguration": { - "protobuf": { - "descriptorKey": "a85dff8f82655a9681aad113575dcfbb", - "service": "Calculator/calculateOne" - } - }, - "request": { - "contents": { - "content": "EgoNAABAQBUAAIBA", - "contentType": "application/protobuf;message=ShapeMessage", - "contentTypeHint": "BINARY", - "encoded": "base64" - }, - "matchingRules": { - "body": { 
- "$.rectangle.length": { - "combine": "AND", - "matchers": [ - { - "match": "number" - } - ] - }, - "$.rectangle.width": { - "combine": "AND", - "matchers": [ - { - "match": "number" - } - ] - } - } - } - }, - "response": [ - { - "contents": { - "content": "CgQAAEBB", - "contentType": "application/protobuf;message=AreaResponse", - "contentTypeHint": "BINARY", - "encoded": "base64" - }, - "matchingRules": { - "body": { - "$.value[0].*": { - "combine": "AND", - "matchers": [ - { - "match": "number" - } - ] - } - } - } - } - ], - "transport": "grpc", - "type": "Synchronous/Messages" - } - ], - "metadata": { - "pact-python": { - "ffi": "0.3.15" - }, - "pactRust": { - "ffi": "0.3.15", - "mockserver": "0.9.5", - "models": "1.0.0" - }, - "pactSpecification": { - "version": "4.0" - }, - "plugins": [ - { - "configuration": { - "a85dff8f82655a9681aad113575dcfbb": { - "protoDescriptors": "CsoHChVhcmVhX2NhbGN1bGF0b3IucHJvdG8SD2FyZWFfY2FsY3VsYXRvciK6AgoMU2hhcGVNZXNzYWdlEjEKBnNxdWFyZRgBIAEoCzIXLmFyZWFfY2FsY3VsYXRvci5TcXVhcmVIAFIGc3F1YXJlEjoKCXJlY3RhbmdsZRgCIAEoCzIaLmFyZWFfY2FsY3VsYXRvci5SZWN0YW5nbGVIAFIJcmVjdGFuZ2xlEjEKBmNpcmNsZRgDIAEoCzIXLmFyZWFfY2FsY3VsYXRvci5DaXJjbGVIAFIGY2lyY2xlEjcKCHRyaWFuZ2xlGAQgASgLMhkuYXJlYV9jYWxjdWxhdG9yLlRyaWFuZ2xlSABSCHRyaWFuZ2xlEkYKDXBhcmFsbGVsb2dyYW0YBSABKAsyHi5hcmVhX2NhbGN1bGF0b3IuUGFyYWxsZWxvZ3JhbUgAUg1wYXJhbGxlbG9ncmFtQgcKBXNoYXBlIikKBlNxdWFyZRIfCgtlZGdlX2xlbmd0aBgBIAEoAlIKZWRnZUxlbmd0aCI5CglSZWN0YW5nbGUSFgoGbGVuZ3RoGAEgASgCUgZsZW5ndGgSFAoFd2lkdGgYAiABKAJSBXdpZHRoIiAKBkNpcmNsZRIWCgZyYWRpdXMYASABKAJSBnJhZGl1cyJPCghUcmlhbmdsZRIVCgZlZGdlX2EYASABKAJSBWVkZ2VBEhUKBmVkZ2VfYhgCIAEoAlIFZWRnZUISFQoGZWRnZV9jGAMgASgCUgVlZGdlQyJICg1QYXJhbGxlbG9ncmFtEh8KC2Jhc2VfbGVuZ3RoGAEgASgCUgpiYXNlTGVuZ3RoEhYKBmhlaWdodBgCIAEoAlIGaGVpZ2h0IkQKC0FyZWFSZXF1ZXN0EjUKBnNoYXBlcxgBIAMoCzIdLmFyZWFfY2FsY3VsYXRvci5TaGFwZU1lc3NhZ2VSBnNoYXBlcyIkCgxBcmVhUmVzcG9uc2USFAoFdmFsdWUYASADKAJSBXZhbHVlMq0BCgpDYWxjdWxhdG9yEk4KDGNhbGN1bGF0ZU9uZRIdLmFyZWFfY2FsY3VsYXRvci5TaGFwZU1lc3NhZ2UaHS5hcmVhX2NhbGN1bGF0b3IuQXJlYVJlc3BvbnNlIgASTwoOY2FsY3VsYXRlTXVsdGkSHC5hcmVhX2NhbGN1bGF0b3IuQXJlYVJlcXVlc3QaHS5hcmVhX2NhbGN1bGF0b3IuQXJlYVJlc3BvbnNlIgBCHFoXaW8ucGFjdC9hcmVhX2NhbGN1bGF0b3LQAgFiBnByb3RvMw==", - "protoFile": "syntax = \"proto3\";\n\npackage area_calculator;\n\noption php_generic_services = true;\noption go_package = \"io.pact/area_calculator\";\n\nservice Calculator {\n rpc calculateOne (ShapeMessage) returns (AreaResponse) {}\n rpc calculateMulti (AreaRequest) returns (AreaResponse) {}\n}\n\nmessage ShapeMessage {\n oneof shape {\n Square square = 1;\n Rectangle rectangle = 2;\n Circle circle = 3;\n Triangle triangle = 4;\n Parallelogram parallelogram = 5;\n }\n}\n\nmessage Square {\n float edge_length = 1;\n}\n\nmessage Rectangle {\n float length = 1;\n float width = 2;\n}\n\nmessage Circle {\n float radius = 1;\n}\n\nmessage Triangle {\n float edge_a = 1;\n float edge_b = 2;\n float edge_c = 3;\n}\n\nmessage Parallelogram {\n float base_length = 1;\n float height = 2;\n}\n\nmessage AreaRequest {\n repeated ShapeMessage shapes = 1;\n}\n\nmessage AreaResponse {\n repeated float value = 1;\n}" - } - }, - "name": "protobuf", - "version": "0.1.17" - } - ] - }, - "provider": { - "name": "area-calculator-provider" - } -} \ No newline at end of file diff --git a/python/pacts/http-consumer-1-http-provider.json b/python/pacts/http-consumer-1-http-provider.json deleted file mode 100644 index f065a8ff9..000000000 --- a/python/pacts/http-consumer-1-http-provider.json +++ /dev/null @@ -1,154 +0,0 @@ -{ - "consumer": { - "name": 
"http-consumer-1" - }, - "interactions": [ - { - "description": "A POST request to create book", - "providerStates": [ - { - "name": "No book fixtures required" - } - ], - "request": { - "body": { - "author": "Margaret Atwood", - "description": "Brilliantly conceived and executed, this powerful evocation of twenty-first century America gives full rein to Margaret Atwood's devastating irony, wit and astute perception.", - "isbn": "0099740915", - "publicationDate": "1985-07-31T00:00:00+00:00", - "title": "The Handmaid's Tale" - }, - "headers": { - "Content-Type": "application/json" - }, - "matchingRules": { - "body": { - "$.author": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.description": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.isbn": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.publicationDate": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z)$" - } - ] - }, - "$.title": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - } - }, - "header": {} - }, - "method": "POST", - "path": "/api/books" - }, - "response": { - "body": { - "@context": "/api/contexts/Book", - "@id": "/api/books/0114b2a8-3347-49d8-ad99-0e792c5a30e6", - "@type": "Book", - "author": "Melisa Kassulke", - "description": "Quaerat odit quia nisi accusantium natus voluptatem. Explicabo corporis eligendi ut ut sapiente ut qui quidem. Optio amet velit aut delectus. Sed alias asperiores perspiciatis deserunt omnis. Mollitia unde id in.", - "publicationDate": "1999-02-13T00:00:00+07:00", - "reviews": [], - "title": "Voluptas et tempora repellat corporis excepturi." - }, - "headers": { - "Content-Type": "application/ld+json; charset=utf-8" - }, - "matchingRules": { - "body": { - "$.author": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.description": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$.publicationDate": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z)$" - } - ] - }, - "$.title": { - "combine": "AND", - "matchers": [ - { - "match": "type" - } - ] - }, - "$['@id']": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": "^\\/api\\/books\\/[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$" - } - ] - } - }, - "header": {} - }, - "status": 200 - } - } - ], - "metadata": { - "pact-python": { - "ffi": "0.3.15" - }, - "pactRust": { - "ffi": "0.3.15", - "mockserver": "0.9.5", - "models": "1.0.0" - }, - "pactSpecification": { - "version": "3.0.0" - } - }, - "provider": { - "name": "http-provider" - } -} \ No newline at end of file diff --git a/python/pacts/http-consumer-2-http-provider.json b/python/pacts/http-consumer-2-http-provider.json deleted file mode 100644 index b60c5f730..000000000 --- a/python/pacts/http-consumer-2-http-provider.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "consumer": { - "name": "http-consumer-2" - }, - "interactions": [ - { - "description": "A PUT request to generate book cover", - "providerStates": [ - { - "name": "A book with id fb5a885f-f7e8-4a50-950f-c1a64a94d500 is required" - } - ], - "request": { - "body": [], - "headers": { - "Content-Type": "application/json" - }, - "method": "PUT", - "path": "/api/books/fb5a885f-f7e8-4a50-950f-c1a64a94d500/generate-cover" - }, - "response": { - "status": 204 - } - } - ], - 
"metadata": { - "pact-python": { - "ffi": "0.3.15" - }, - "pactRust": { - "ffi": "0.3.15", - "mockserver": "0.9.5", - "models": "1.0.0" - }, - "pactSpecification": { - "version": "3.0.0" - } - }, - "provider": { - "name": "http-provider" - } -} \ No newline at end of file diff --git a/python/pacts/message-consumer-2-message-provider.json b/python/pacts/message-consumer-2-message-provider.json deleted file mode 100644 index 4fba5ee67..000000000 --- a/python/pacts/message-consumer-2-message-provider.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "consumer": { - "name": "message-consumer-2" - }, - "messages": [ - { - "contents": { - "uuid": "fb5a885f-f7e8-4a50-950f-c1a64a94d500" - }, - "description": "Book (id fb5a885f-f7e8-4a50-950f-c1a64a94d500) created message", - "matchingRules": { - "body": { - "$.uuid": { - "combine": "AND", - "matchers": [ - { - "match": "regex", - "regex": "^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$" - } - ] - } - } - }, - "metadata": { - "contentType": "application/json" - }, - "providerStates": [ - { - "name": "A book with id fb5a885f-f7e8-4a50-950f-c1a64a94d500 is required" - } - ] - } - ], - "metadata": { - "pactRust": { - "ffi": "0.3.15", - "models": "1.0.0" - }, - "pactSpecification": { - "version": "3.0.0" - } - }, - "provider": { - "name": "message-provider" - } -} \ No newline at end of file diff --git a/python/register_ffi.py b/python/register_ffi.py deleted file mode 100644 index fa428f684..000000000 --- a/python/register_ffi.py +++ /dev/null @@ -1,72 +0,0 @@ -import platform -import os -import sys - -IS_64 = sys.maxsize > 2 ** 32 - -DIRECTIVES =[ -"#ifndef pact_ffi_h", -"#define pact_ffi_h", -"#include ", -"#include ", -"#include ", -"#include ", -"#endif /* pact_ffi_h */" - ] - -# Build with cargo build in pact_ffi crate -FFI_HEADER_DIR='../rust/pact_ffi/include/' -FFI_LIB_DIR='../rust/target/debug/' - -FFI_HEADER_PATH=f"{FFI_HEADER_DIR}pact.h" - -def process_pact_header_file(file): - with open(file, "r") as fp: - lines = fp.readlines() - - pactfile = [] - - for line in lines: - if line.strip("\n") not in DIRECTIVES: - pactfile.append(line) - - return ''.join(pactfile) - -def load_ffi_library(ffi): - """Load the right library.""" - target_platform = platform.platform().lower() - print(target_platform) - print(platform.machine()) - - if ("darwin" in target_platform or "macos" in target_platform) and ("aarch64" in platform.machine() or "arm64" in platform.machine()): - libname = os.path.abspath(f"{FFI_LIB_DIR}libpact_ffi.dylib") - # the commented out lib names come from the distributed binaries - # libname = os.path.abspath(f"{FFI_LIB_DIR}libpact_ffi-osx-aarch64-apple-darwin.dylib") - elif "darwin" in target_platform or "macos" in target_platform: - libname = os.path.abspath(f"{FFI_LIB_DIR}libpact_ffi.dylib") - # libname = os.path.abspath(f"{FFI_LIB_DIR}libpact_ffi-osx-x86_64.dylib") - elif "linux" in target_platform and IS_64 and ("aarch64" in platform.machine() or "arm64" in platform.machine()): - libname = os.path.abspath(f"{FFI_LIB_DIR}libpact_ffi.so") - # libname = os.path.abspath(f"{FFI_LIB_DIR}libpact_ffi-linux-aarch64.so") - elif "linux" in target_platform and IS_64: - libname = os.path.abspath(f"{FFI_LIB_DIR}libpact_ffi.so") - # libname = os.path.abspath(f"{FFI_LIB_DIR}libpact_ffi-linux-x86_64.so") - elif 'windows' in target_platform: - libname = os.path.abspath(f"{FFI_LIB_DIR}pact_ffi-windows-x86_64.dll") - # libname = os.path.abspath(f"{FFI_LIB_DIR}pact_ffi-windows-x86_64.dll") - else: - msg = ('Unfortunately, {} is not a supported platform. 
Only Linux,' - ' Windows, and OSX are currently supported.').format(target_platform) - raise Exception(msg) - - return ffi.dlopen(libname) - -def load_ffi_headers(ffi): - return ffi.cdef(process_pact_header_file(FFI_HEADER_PATH)) - -def get_ffi_lib(ffi): - load_ffi_headers(ffi) - lib = load_ffi_library(ffi) - return lib - - diff --git a/python/requires.txt b/python/requires.txt deleted file mode 100644 index 7f8169e58..000000000 --- a/python/requires.txt +++ /dev/null @@ -1,2 +0,0 @@ -cffi==1.14.6 -requests>=2.5.0 diff --git a/ruby/example_consumer_spec/.rspec b/ruby/example_consumer_spec/.rspec deleted file mode 100644 index 34c5164d9..000000000 --- a/ruby/example_consumer_spec/.rspec +++ /dev/null @@ -1,3 +0,0 @@ ---format documentation ---color ---require spec_helper diff --git a/ruby/example_consumer_spec/.ruby-version b/ruby/example_consumer_spec/.ruby-version deleted file mode 100644 index 437459cd9..000000000 --- a/ruby/example_consumer_spec/.ruby-version +++ /dev/null @@ -1 +0,0 @@ -2.5.0 diff --git a/ruby/example_consumer_spec/Gemfile b/ruby/example_consumer_spec/Gemfile deleted file mode 100644 index 2fbde4ffa..000000000 --- a/ruby/example_consumer_spec/Gemfile +++ /dev/null @@ -1,7 +0,0 @@ -source 'https://rubygems.org' - -gem 'pact_mockserver_mk2', path: '../pact_mockserver_mk2' - -gem 'rake' -gem 'rspec' -gem 'httparty', '>= 0.21.0' diff --git a/ruby/example_consumer_spec/Gemfile.lock b/ruby/example_consumer_spec/Gemfile.lock deleted file mode 100644 index 4232d541d..000000000 --- a/ruby/example_consumer_spec/Gemfile.lock +++ /dev/null @@ -1,45 +0,0 @@ -PATH - remote: ../pact_mockserver_mk2 - specs: - pact_mockserver_mk2 (0.0.0) - helix_runtime (~> 0.7.0) - -GEM - remote: https://rubygems.org/ - specs: - diff-lcs (1.3) - helix_runtime (0.7.3) - rake (>= 10.0) - thor (>= 0.19.4, < 2.0) - tomlrb (~> 1.2.4) - httparty (0.16.1) - multi_xml (>= 0.5.2) - multi_xml (0.6.0) - rake (13.0.1) - rspec (3.7.0) - rspec-core (~> 3.7.0) - rspec-expectations (~> 3.7.0) - rspec-mocks (~> 3.7.0) - rspec-core (3.7.1) - rspec-support (~> 3.7.0) - rspec-expectations (3.7.0) - diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.7.0) - rspec-mocks (3.7.0) - diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.7.0) - rspec-support (3.7.1) - thor (0.20.0) - tomlrb (1.2.6) - -PLATFORMS - ruby - -DEPENDENCIES - httparty - pact_mockserver_mk2! - rake - rspec - -BUNDLED WITH - 1.16.1 diff --git a/ruby/example_consumer_spec/Rakefile b/ruby/example_consumer_spec/Rakefile deleted file mode 100644 index 70a846df5..000000000 --- a/ruby/example_consumer_spec/Rakefile +++ /dev/null @@ -1,5 +0,0 @@ -require 'rspec/core/rake_task' - -RSpec::Core::RakeTask.new(:spec) - -task :default => :spec diff --git a/ruby/example_consumer_spec/spec/simple_consumer_spec.rb b/ruby/example_consumer_spec/spec/simple_consumer_spec.rb deleted file mode 100644 index cb0fd2d3b..000000000 --- a/ruby/example_consumer_spec/spec/simple_consumer_spec.rb +++ /dev/null @@ -1,152 +0,0 @@ -require 'httparty' - -RSpec.describe 'Simple Consumer Spec' do - - describe 'with matching requests' do - - let(:pact) do - ' - { - "provider": { - "name": "Alice Service" - }, - "consumer": { - "name": "Consumer" - }, - "interactions": [ - { - "description": "a retrieve Mallory request", - "request": { - "method": "GET", - "path": "/mallory", - "query": "name=ron&status=good" - }, - "response": { - "status": 200, - "headers": { - "Content-Type": "text/html" - }, - "body": "That is some good Mallory." 
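register_ffi.py above is the bootstrap used by every Python example in this directory: it strips the preprocessor directives from pact.h, feeds the remaining declarations to cffi, and dlopens the locally built library for the current platform. A minimal usage sketch, assuming the Rust pact_ffi crate has been built in debug mode so the relative paths in register_ffi.py resolve:

    from cffi import FFI
    from register_ffi import get_ffi_lib

    ffi = FFI()
    lib = get_ffi_lib(ffi)  # cdef()s the pact.h declarations, then dlopen()s the shared library

    version = ffi.string(lib.pactffi_version()).decode('utf-8')
    print(f"Pact FFI version: {version}")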
- } - } - ], - "metadata": { - "pact-specification": { - "version": "1.0.0" - }, - "pact-jvm": { - "version": "1.0.0" - } - } - } - ' - end - - let(:mock_server_port) { PactMockServerMk2::create_mock_server(pact, 0) } - - after do - PactMockServerMk2::cleanup_mock_server(mock_server_port) - end - - it 'executes the pact test with no errors' do - puts "Mock server port=#{mock_server_port}" - - response = HTTParty.get("http://localhost:#{mock_server_port}/mallory?name=ron&status=good") - - expect(response.body).to eq 'That is some good Mallory.' - expect(PactMockServerMk2::all_matched(mock_server_port)).to be true - end - end - - - describe 'with mismatching requests' do - - let(:pact) do - ' - { - "provider": { - "name": "test_provider" - }, - "consumer": { - "name": "test_consumer" - }, - "interactions": [ - { - "providerState": "test state", - "description": "test interaction", - "request": { - "method": "POST", - "path": "/", - "body": { - "complete": { - "certificateUri": "http://...", - "issues": { - "idNotFound": {} - }, - "nevdis": { - "body": null, - "colour": null, - "engine": null - }, - "body": 123456 - }, - "body": [ - 1, - 2, - 3 - ] - } - }, - "response": { - "status": 200 - } - } - ], - "metadata": { - "pact-specification": { - "version": "2.0.0" - }, - "pact-jvm": { - "version": "" - } - } - } - ' - end - - let(:mock_server_port) { PactMockServerMk2::create_mock_server(pact, 0) } - - after do - PactMockServerMk2::cleanup_mock_server(mock_server_port) - end - - it 'returns the mismatches' do - puts "Mock server port=#{mock_server_port}" - - expect(PactMockServerMk2::all_matched(mock_server_port)).to be false - - response1 = HTTParty.post("http://localhost:#{mock_server_port}/", - :headers => {'Content-Type': 'application/json'}, :body => '{}') - - response2 = HTTParty.put("http://localhost:#{mock_server_port}/mallory", body: { - :complete => { - :certificateUri => "http://...", - :issues => {}, - :nevdis => { - :body => "red", - :colour => nil, - :engine => nil - }, - :body => "123456" - }, - :body => [1, 3] - }) - - expect(PactMockServerMk2::all_matched(mock_server_port)).to be false - mismatchers = PactMockServerMk2::mock_server_mismatches(mock_server_port) - puts mismatchers - expect(mismatchers.length).to eql(2) - end - end - -end diff --git a/ruby/example_consumer_spec/spec/spec_helper.rb b/ruby/example_consumer_spec/spec/spec_helper.rb deleted file mode 100644 index dae29caf4..000000000 --- a/ruby/example_consumer_spec/spec/spec_helper.rb +++ /dev/null @@ -1,14 +0,0 @@ -require 'bundler/setup' -require 'pact_mockserver_mk2' - -RSpec.configure do |config| - # Enable flags like --only-failures and --next-failure - config.example_status_persistence_file_path = '.rspec_status' - - # Disable RSpec exposing methods globally on `Module` and `main` - config.disable_monkey_patching! 
- - config.expect_with :rspec do |c| - c.syntax = :expect - end -end diff --git a/ruby/pact_mockserver_mk2/.rspec b/ruby/pact_mockserver_mk2/.rspec deleted file mode 100644 index 34c5164d9..000000000 --- a/ruby/pact_mockserver_mk2/.rspec +++ /dev/null @@ -1,3 +0,0 @@ ---format documentation ---color ---require spec_helper diff --git a/ruby/pact_mockserver_mk2/.ruby-version b/ruby/pact_mockserver_mk2/.ruby-version deleted file mode 100644 index 437459cd9..000000000 --- a/ruby/pact_mockserver_mk2/.ruby-version +++ /dev/null @@ -1 +0,0 @@ -2.5.0 diff --git a/ruby/pact_mockserver_mk2/.travis.yml b/ruby/pact_mockserver_mk2/.travis.yml deleted file mode 100644 index 6a8e36fa1..000000000 --- a/ruby/pact_mockserver_mk2/.travis.yml +++ /dev/null @@ -1,5 +0,0 @@ -sudo: false -language: ruby -rvm: - - 2.5.0 -before_install: gem install bundler -v 1.16.1 diff --git a/ruby/pact_mockserver_mk2/CODE_OF_CONDUCT.md b/ruby/pact_mockserver_mk2/CODE_OF_CONDUCT.md deleted file mode 100644 index 58b1f842a..000000000 --- a/ruby/pact_mockserver_mk2/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,74 +0,0 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - -In the interest of fostering an open and welcoming environment, we as -contributors and maintainers pledge to making participation in our project and -our community a harassment-free experience for everyone, regardless of age, body -size, disability, ethnicity, gender identity and expression, level of experience, -nationality, personal appearance, race, religion, or sexual identity and -orientation. - -## Our Standards - -Examples of behavior that contributes to creating a positive environment -include: - -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery and unwelcome sexual attention or -advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic - address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Our Responsibilities - -Project maintainers are responsible for clarifying the standards of acceptable -behavior and are expected to take appropriate and fair corrective action in -response to any instances of unacceptable behavior. - -Project maintainers have the right and responsibility to remove, edit, or -reject comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct, or to ban temporarily or -permanently any contributor for other behaviors that they deem inappropriate, -threatening, offensive, or harmful. - -## Scope - -This Code of Conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. Examples of -representing a project or community include using an official project e-mail -address, posting via an official social media account, or acting as an appointed -representative at an online or offline event. Representation of a project may be -further defined and clarified by project maintainers. 
- -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported by contacting the project team at uglyog@gmail.com. All -complaints will be reviewed and investigated and will result in a response that -is deemed necessary and appropriate to the circumstances. The project team is -obligated to maintain confidentiality with regard to the reporter of an incident. -Further details of specific enforcement policies may be posted separately. - -Project maintainers who do not follow or enforce the Code of Conduct in good -faith may face temporary or permanent repercussions as determined by other -members of the project's leadership. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, -available at [http://contributor-covenant.org/version/1/4][version] - -[homepage]: http://contributor-covenant.org -[version]: http://contributor-covenant.org/version/1/4/ diff --git a/ruby/pact_mockserver_mk2/Cargo.lock b/ruby/pact_mockserver_mk2/Cargo.lock deleted file mode 100644 index 7e09ee568..000000000 --- a/ruby/pact_mockserver_mk2/Cargo.lock +++ /dev/null @@ -1,730 +0,0 @@ -[[package]] -name = "aho-corasick" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "ansi_term" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "base64" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "safemem 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "bitflags" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "byteorder" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "cfg-if" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "cookie" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", - "url 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "cstr-macro" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "difference" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "dtoa" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "either" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "env_logger" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "fuchsia-zircon" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "bitflags 1.0.1 
(registry+https://github.com/rust-lang/crates.io-index)", - "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "fuchsia-zircon-sys" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "helix" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cstr-macro 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", - "libcruby-sys 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "hex" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "hpack" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "httparse" -version = "1.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "hyper" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cookie 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", - "httparse 1.2.4 (registry+https://github.com/rust-lang/crates.io-index)", - "language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "mime 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", - "solicit 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", - "traitobject 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "unicase 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "url 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "idna" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "indextree" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "itertools" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "itertools" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "itoa" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = 
"kernel32-sys" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "language-tags" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "lazy_static" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "lazy_static" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "libc" -version = "0.2.40" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "libcruby-sys" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "log" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "log" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "maplit" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "matches" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "memchr" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "mime" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "num-traits" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "num_cpus" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "p-macro" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "pact_matching" -version = "0.4.1" -dependencies = [ - "ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "base64 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "difference 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "hyper 0.9.18 (registry+https://github.com/rust-lang/crates.io-index)", - "indextree 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "maplit 0.1.6 
(registry+https://github.com/rust-lang/crates.io-index)", - "p-macro 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", - "semver 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)", - "sxd-document 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", - "uuid 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "pact_mock_server" -version = "0.4.1" -dependencies = [ - "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", - "hyper 0.9.18 (registry+https://github.com/rust-lang/crates.io-index)", - "itertools 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "maplit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "p-macro 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "pact_matching 0.4.1", - "serde_json 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)", - "uuid 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "pact_mockserver_mk2" -version = "0.0.0" -dependencies = [ - "helix 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "pact_matching 0.4.1", - "pact_mock_server 0.4.1", - "serde_json 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)", - "simplelog 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "uuid 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "percent-encoding" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "peresil" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "proc-macro2" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "quote" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand" -version = "0.3.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand" -version = "0.4.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "redox_syscall" -version = "0.1.37" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "regex" -version = "0.2.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "aho-corasick 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", - "thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "regex-syntax" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rustc-serialize" -version = "0.3.24" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "safemem" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "semver" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "semver-parser" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "serde" -version = "1.0.37" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "serde_derive" -version = "1.0.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive_internals 0.23.0 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "serde_derive_internals" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "serde_json" -version = "1.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "itoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "simplelog" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "log 0.3.9 
(registry+https://github.com/rust-lang/crates.io-index)", - "term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "solicit" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "hpack 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "sxd-document" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "peresil 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "typed-arena 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "syn" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "term" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "thread_local" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "time" -version = "0.1.39" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", - "redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "traitobject" -version = "0.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "typeable" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "typed-arena" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "ucd-util" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "unicase" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "version_check 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "unicode-bidi" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "unicode-normalization" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "unicode-xid" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - 
-[[package]] -name = "unreachable" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "url" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "utf8-ranges" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "uuid" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "uuid" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "version_check" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "void" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "winapi" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "winapi" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "winapi-build" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[metadata] -"checksum aho-corasick 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d6531d44de723825aa81398a6415283229725a00fa30713812ab9323faa82fc4" -"checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6" -"checksum base64 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5032d51da2741729bfdaeb2664d9b8c6d9fd1e2b90715c660b6def36628499c2" -"checksum bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3c30d3802dfb7281680d6285f2ccdaa8c2d8fee41f93805dba5c4cf50dc23cf" -"checksum byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "73b5bdfe7ee3ad0b99c9801d58807a9dbc9e09196365b0203853b99889ab3c87" -"checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de" -"checksum cookie 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = 
"0e3d6405328b6edb412158b3b7710e2634e23f3614b9bb1c412df7952489a626" -"checksum cstr-macro 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "db53fddba18cdd35477a7213a3ef6acfbfa333c31b42ce019e544c4a1420a06f" -"checksum difference 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b3304d19798a8e067e48d8e69b2c37f0b5e9b4e462504ad9e27e9f3fce02bba8" -"checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab" -"checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0" -"checksum env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b" -"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" -"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" -"checksum helix 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1a5e45d09f37327fded2b1e0939ab4c061e181b0c007f82c53ccd03e869cf085" -"checksum hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d6a22814455d41612f41161581c2883c0c6a1c41852729b17d5ed88f01e153aa" -"checksum hpack 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3d2da7d3a34cf6406d9d700111b8eafafe9a251de41ae71d8052748259343b58" -"checksum httparse 1.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c2f407128745b78abc95c0ffbe4e5d37427fdc0d45470710cfef8c44522a2e37" -"checksum hyper 0.9.18 (registry+https://github.com/rust-lang/crates.io-index)" = "1b9bf64f730d6ee4b0528a5f0a316363da9d8104318731509d4ccc86248f82b3" -"checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d" -"checksum indextree 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7f4891aa1dfa3c697a4406e93d952d924b34664a040250a7d5e6a45290aaa6c0" -"checksum itertools 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d3f2be4da1690a039e9ae5fd575f706a63ad5a2120f161b1d653c9da3930dd21" -"checksum itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)" = "f58856976b776fedd95533137617a02fb25719f40e7d9b01c7043cd65474f450" -"checksum itoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c069bbec61e1ca5a596166e55dfe4773ff745c3d16b700013bcaff9a6df2c682" -"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" -"checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a" -"checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73" -"checksum lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c8f31047daa365f19be14b47c29df4f7c3b581832407daabe6ae77397619237d" -"checksum libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)" = 
"6fd41f331ac7c5b8ac259b8bf82c75c0fb2e469bbf37d2becbba9a6a2221965b" -"checksum libcruby-sys 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e8b0fb9beb529127d706dd12e26f961be3e527badb74a7b2e5d6b9d928fe6059" -"checksum log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b" -"checksum log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "89f010e843f2b1a31dbd316b3b8d443758bc634bed37aabade59c686d644e0a2" -"checksum maplit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "22593015b8df7747861c69c28acd32589fb96c1686369f3b661d12e409d4cf65" -"checksum matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "100aabe6b8ff4e4a7e32c1c13523379802df0772b82466207ac25b013f193376" -"checksum memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "796fba70e76612589ed2ce7f45282f5af869e0fdd7cc6199fa1aa1f1d591ba9d" -"checksum mime 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ba626b8a6de5da682e1caa06bdb42a335aee5a84db8e5046a3e8ab17ba0a3ae0" -"checksum num-traits 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dee092fcdf725aee04dd7da1d21debff559237d49ef1cb3e69bcb8ece44c7364" -"checksum num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c51a3322e4bca9d212ad9a158a02abc6934d005490c054a2778df73a70aa0a30" -"checksum p-macro 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a2e6ec260ffca4e190641151b2d900ce473300f53de8f298a132ad37168581c9" -"checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" -"checksum peresil 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f658886ed52e196e850cfbbfddab9eaa7f6d90dd0929e264c31e5cec07e09e57" -"checksum proc-macro2 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "77997c53ae6edd6d187fec07ec41b207063b5ee6f33680e9fa86d405cdd313d4" -"checksum quote 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7b0ff51282f28dc1b53fd154298feaa2e77c5ea0dba68e1fd8b03b72fbe13d2a" -"checksum rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "15a732abf9d20f0ad8eeb6f909bf6868722d9a06e1e50802b6a70351f40b4eb1" -"checksum rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "eba5f8cb59cc50ed56be8880a5c7b496bfd9bd26394e176bc67884094145c2c5" -"checksum redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "0d92eecebad22b767915e4d529f89f28ee96dbbf5a4810d2b844373f136417fd" -"checksum regex 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "aec3f58d903a7d2a9dc2bf0e41a746f4530e0cab6b615494e058f67a3ef947fb" -"checksum regex-syntax 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b2550876c31dc914696a6c2e01cbce8afba79a93c8ae979d2fe051c0230b3756" -"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" -"checksum safemem 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e27a8b19b835f7aea908818e871f5cc3a5a186550c30773be987e155e8163d8f" -"checksum semver 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"3fdd61b85a0fa777f7fb7c454b9189b2941b110d1385ce84d7f76efdf1606a85" -"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" -"checksum serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)" = "d3bcee660dcde8f52c3765dd9ca5ee36b4bf35470a738eb0bd5a8752b0389645" -"checksum serde_derive 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)" = "f1711ab8b208541fa8de00425f6a577d90f27bb60724d2bb5fd911314af9668f" -"checksum serde_derive_internals 0.23.0 (registry+https://github.com/rust-lang/crates.io-index)" = "89b340a48245bc03ddba31d0ff1709c118df90edc6adabaca4aac77aea181cce" -"checksum serde_json 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "5c508584d9913df116b91505eec55610a2f5b16e9ed793c46e4d0152872b3e74" -"checksum simplelog 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "24b615b1a3cc51ffa565d9a1d0cfcc49fe7d64737ada84eca284cddb0292d125" -"checksum solicit 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "172382bac9424588d7840732b250faeeef88942e37b6e35317dce98cafdd75b2" -"checksum sxd-document 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "1e360d2575b173847506841ca97dbab96b6d27a2ec660b70932fe32a2b12e0a2" -"checksum syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)" = "91b52877572087400e83d24b9178488541e3d535259e04ff17a63df1e5ceff59" -"checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1" -"checksum thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "279ef31c19ededf577bfd12dfae728040a21f635b06a24cd670ff510edd38963" -"checksum time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "a15375f1df02096fb3317256ce2cee6a1f42fc84ea5ad5fc8c421cfe40c73098" -"checksum traitobject 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "07eaeb7689bb7fca7ce15628319635758eda769fed481ecfe6686ddef2600616" -"checksum typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1410f6f91f21d1612654e7cc69193b0334f909dcf2c790c4826254fbb86f8887" -"checksum typed-arena 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5934776c3ac1bea4a9d56620d6bf2d483b20d394e49581db40f187e1118ff667" -"checksum ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd2be2d6639d0f8fe6cdda291ad456e23629558d466e2789d2c3e9892bda285d" -"checksum unicase 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7f4765f83163b74f957c797ad9253caf97f103fb064d3999aea9568d09fc8a33" -"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" -"checksum unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f" -"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" -"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" -"checksum url 1.7.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "f808aadd8cfec6ef90e4a14eb46f24511824d1ac596b9682703c87056c8678b7" -"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122" -"checksum uuid 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bcc7e3b898aa6f6c08e5295b6c89258d1331e9ac578cc992fb818759951bdc22" -"checksum uuid 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4670e1e935f7edd193a413f802e2ee52274aed62a09ccaab1656515c9c53a66" -"checksum version_check 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6b772017e347561807c1aa192438c5fd74242a670a6cffacc40f2defd1dc069d" -"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" -"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" -"checksum winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "04e3bd221fcbe8a271359c04f21a76db7d0c6028862d1bb5512d85e1e2eb5bb3" -"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" -"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" -"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/ruby/pact_mockserver_mk2/Cargo.toml b/ruby/pact_mockserver_mk2/Cargo.toml deleted file mode 100644 index a88bb9cd2..000000000 --- a/ruby/pact_mockserver_mk2/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "pact_mockserver_mk2" -version = "0.0.0" -authors = ["Ronald Holshausen "] - -[lib] -crate-type = ["cdylib"] - -[dependencies] -helix = "0.7" -pact_matching = { version = "0.5", path = "../../rust/pact_matching" } -pact_mock_server = { version = "0.5", path = "../../rust/pact_mock_server" } -serde_json = "1.0" -log = "0.3.8" -simplelog = "0.4.0" -uuid = { version = "0.6", features = ["v4"] } diff --git a/ruby/pact_mockserver_mk2/Gemfile b/ruby/pact_mockserver_mk2/Gemfile deleted file mode 100644 index d67893f6d..000000000 --- a/ruby/pact_mockserver_mk2/Gemfile +++ /dev/null @@ -1,6 +0,0 @@ -source 'https://rubygems.org' - -git_source(:github) { |repo_name| "https://github.com/#{repo_name}" } - -# Specify your gem's dependencies in pact_mockserver_mk2.gemspec -gemspec diff --git a/ruby/pact_mockserver_mk2/Gemfile.lock b/ruby/pact_mockserver_mk2/Gemfile.lock deleted file mode 100644 index 8bdbf256e..000000000 --- a/ruby/pact_mockserver_mk2/Gemfile.lock +++ /dev/null @@ -1,42 +0,0 @@ -PATH - remote: . 
- specs: - pact_mockserver_mk2 (0.0.0) - helix_runtime (~> 0.7.0) - -GEM - remote: https://rubygems.org/ - specs: - diff-lcs (1.3) - helix_runtime (0.7.3) - rake (>= 10.0) - thor (>= 0.19.4, < 2.0) - tomlrb (~> 1.2.4) - rake (13.0.1) - rspec (3.7.0) - rspec-core (~> 3.7.0) - rspec-expectations (~> 3.7.0) - rspec-mocks (~> 3.7.0) - rspec-core (3.7.1) - rspec-support (~> 3.7.0) - rspec-expectations (3.7.0) - diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.7.0) - rspec-mocks (3.7.0) - diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.7.0) - rspec-support (3.7.1) - thor (0.20.0) - tomlrb (1.2.6) - -PLATFORMS - ruby - -DEPENDENCIES - bundler (~> 1.16) - pact_mockserver_mk2! - rake (~> 13.0) - rspec (~> 3.0) - -BUNDLED WITH - 1.16.1 diff --git a/ruby/pact_mockserver_mk2/LICENSE.txt b/ruby/pact_mockserver_mk2/LICENSE.txt deleted file mode 100644 index d0de87414..000000000 --- a/ruby/pact_mockserver_mk2/LICENSE.txt +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2018 Ronald Holshausen - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/ruby/pact_mockserver_mk2/README.md b/ruby/pact_mockserver_mk2/README.md deleted file mode 100644 index cc8a9bd8a..000000000 --- a/ruby/pact_mockserver_mk2/README.md +++ /dev/null @@ -1,43 +0,0 @@ -# Pact::Mockserver::Mk2 - -Welcome to your new gem! In this directory, you'll find the files you need to be able to package up your Ruby library into a gem. Put your Ruby code in the file `lib/pact/mockserver/mk2`. To experiment with that code, run `bin/console` for an interactive prompt. - -TODO: Delete this and the text above, and describe your gem - -## Installation - -Add this line to your application's Gemfile: - -```ruby -gem 'pact_mockserver_mk2' -``` - -And then execute: - - $ bundle - -Or install it yourself as: - - $ gem install pact-mockserver-mk2 - -## Usage - -TODO: Write usage instructions here - -## Development - -After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment. - -To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org). 
- -## Contributing - -Bug reports and pull requests are welcome on GitHub at https://github.com/[USERNAME]/pact-mockserver-mk2. This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [Contributor Covenant](http://contributor-covenant.org) code of conduct. - -## License - -The gem is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT). - -## Code of Conduct - -Everyone interacting in the Pact::Mockserver::Mk2 project’s codebases, issue trackers, chat rooms and mailing lists is expected to follow the [code of conduct](https://github.com/[USERNAME]/pact-mockserver-mk2/blob/master/CODE_OF_CONDUCT.md). diff --git a/ruby/pact_mockserver_mk2/Rakefile b/ruby/pact_mockserver_mk2/Rakefile deleted file mode 100644 index 3583eac7e..000000000 --- a/ruby/pact_mockserver_mk2/Rakefile +++ /dev/null @@ -1,10 +0,0 @@ -require 'bundler/gem_tasks' -require 'rspec/core/rake_task' -require 'helix_runtime/build_task' - -HelixRuntime::BuildTask.new - -RSpec::Core::RakeTask.new(:spec) - -task :spec => :build -task :default => :spec diff --git a/ruby/pact_mockserver_mk2/bin/console b/ruby/pact_mockserver_mk2/bin/console deleted file mode 100755 index 3d5db9497..000000000 --- a/ruby/pact_mockserver_mk2/bin/console +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env ruby - -require 'bundler/setup' -require 'pact_mockserver_mk2' - -# You can add fixtures and/or initialization code here to make experimenting -# with your gem easier. You can also use a different console, if you like. - -# (If you use this, don't forget to add pry to your Gemfile!) -# require "pry" -# Pry.start - -require 'irb' -IRB.start(__FILE__) diff --git a/ruby/pact_mockserver_mk2/bin/setup b/ruby/pact_mockserver_mk2/bin/setup deleted file mode 100755 index dce67d860..000000000 --- a/ruby/pact_mockserver_mk2/bin/setup +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail -IFS=$'\n\t' -set -vx - -bundle install - -# Do any other automated setup that you need to do here diff --git a/ruby/pact_mockserver_mk2/lib/pact/mockserver/mk2.rb b/ruby/pact_mockserver_mk2/lib/pact/mockserver/mk2.rb deleted file mode 100644 index 9b6957a0c..000000000 --- a/ruby/pact_mockserver_mk2/lib/pact/mockserver/mk2.rb +++ /dev/null @@ -1,9 +0,0 @@ -require 'pact/mockserver/mk2/version' - -module Pact - module Mockserver - module Mk2 - # Your code goes here... - end - end -end diff --git a/ruby/pact_mockserver_mk2/lib/pact/mockserver/mk2/version.rb b/ruby/pact_mockserver_mk2/lib/pact/mockserver/mk2/version.rb deleted file mode 100644 index 6ab2df24d..000000000 --- a/ruby/pact_mockserver_mk2/lib/pact/mockserver/mk2/version.rb +++ /dev/null @@ -1,7 +0,0 @@ -module Pact - module Mockserver - module Mk2 - VERSION = '0.0.0'.freeze - end - end -end diff --git a/ruby/pact_mockserver_mk2/lib/pact_mockserver_mk2.rb b/ruby/pact_mockserver_mk2/lib/pact_mockserver_mk2.rb deleted file mode 100644 index fa1dcfc37..000000000 --- a/ruby/pact_mockserver_mk2/lib/pact_mockserver_mk2.rb +++ /dev/null @@ -1,10 +0,0 @@ -require 'helix_runtime' - -begin - require 'pact_mockserver_mk2/native' -rescue LoadError => e - warn 'Unable to load pact_mockserver_mk2 native. 
Please run `rake build`' - warn e -end - -require 'pact/mockserver/mk2' diff --git a/ruby/pact_mockserver_mk2/pact_mockserver_mk2.gemspec b/ruby/pact_mockserver_mk2/pact_mockserver_mk2.gemspec deleted file mode 100644 index ec4f67e6f..000000000 --- a/ruby/pact_mockserver_mk2/pact_mockserver_mk2.gemspec +++ /dev/null @@ -1,37 +0,0 @@ - -lib = File.expand_path('../lib', __FILE__) -$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) -require 'pact/mockserver/mk2/version' - -Gem::Specification.new do |spec| - spec.name = 'pact_mockserver_mk2' - spec.version = Pact::Mockserver::Mk2::VERSION - spec.authors = ['Ronald Holshausen'] - spec.email = ['uglyog@gmail.com'] - - spec.summary = 'Pact mock server library based on the Pact Rust implementation' - # spec.description = 'TODO: Write a longer description or delete this line.' - spec.homepage = "TODO: Put your gem's website or public repo URL here." - spec.license = 'MIT' - - # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host' - # to allow pushing to a single host or delete this section to allow pushing to any host. - if spec.respond_to?(:metadata) - spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'" - else - raise 'RubyGems 2.0 or newer is required to protect against public gem pushes.' - end - - spec.files = `git ls-files -z`.split("\x0").reject do |f| - f.match(%r{^(test|spec|features)/}) - end - spec.bindir = 'exe' - spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) } - spec.require_paths = ['lib'] - - spec.add_dependency 'helix_runtime', '~> 0.7.0' - - spec.add_development_dependency 'bundler', '~> 1.16' - spec.add_development_dependency 'rake', '~> 13.0' - spec.add_development_dependency 'rspec', '~> 3.0' -end diff --git a/ruby/pact_mockserver_mk2/spec/pact/pact_mockserver_mk2_spec.rb b/ruby/pact_mockserver_mk2/spec/pact/pact_mockserver_mk2_spec.rb deleted file mode 100644 index 608964905..000000000 --- a/ruby/pact_mockserver_mk2/spec/pact/pact_mockserver_mk2_spec.rb +++ /dev/null @@ -1,7 +0,0 @@ -require 'pact_mockserver_mk2' - -RSpec.describe Pact::Mockserver::Mk2 do - it 'has a version number' do - expect(Pact::Mockserver::Mk2::VERSION).not_to be nil - end -end diff --git a/ruby/pact_mockserver_mk2/spec/spec_helper.rb b/ruby/pact_mockserver_mk2/spec/spec_helper.rb deleted file mode 100644 index dae29caf4..000000000 --- a/ruby/pact_mockserver_mk2/spec/spec_helper.rb +++ /dev/null @@ -1,14 +0,0 @@ -require 'bundler/setup' -require 'pact_mockserver_mk2' - -RSpec.configure do |config| - # Enable flags like --only-failures and --next-failure - config.example_status_persistence_file_path = '.rspec_status' - - # Disable RSpec exposing methods globally on `Module` and `main` - config.disable_monkey_patching! - - config.expect_with :rspec do |c| - c.syntax = :expect - end -end diff --git a/ruby/pact_mockserver_mk2/src/lib.rs b/ruby/pact_mockserver_mk2/src/lib.rs deleted file mode 100644 index a40b65743..000000000 --- a/ruby/pact_mockserver_mk2/src/lib.rs +++ /dev/null @@ -1,60 +0,0 @@ -#[macro_use] extern crate helix; -extern crate pact_mock_server; -extern crate pact_matching; -#[macro_use] extern crate serde_json; -#[macro_use] extern crate log; -extern crate simplelog; -extern crate uuid; - -use simplelog::*; -use uuid::Uuid; -use pact_matching::models::Pact; -use pact_mock_server::MatchResult; - -ruby! 
{
-
-  class PactMockServerMk2 {
-    def create_mock_server(pact_json: String, port: i32) -> Result<i32, String> {
-      SimpleLogger::init(LogLevelFilter::Info, Config::default()).unwrap_or(());
-
-      match serde_json::from_str(&pact_json) {
-        Ok(pact_json) => {
-          let pact = Pact::from_json(&"".to_string(), &pact_json);
-          pact_mock_server::start_mock_server(Uuid::new_v4().simple().to_string(), pact, port)
-            .map_err(|err| {
-              error!("Could not start mock server: {}", err);
-              format!("Could not start mock server: {}", err)
-            })
-        },
-        Err(err) => {
-          error!("Could not parse pact json: {}", err);
-          Err(format!("Could not parse pact json: {}", err))
-        }
-      }
-    }
-
-    def cleanup_mock_server(port: i32) -> bool {
-      pact_mock_server::shutdown_mock_server_by_port(port)
-    }
-
-    def all_matched(port: i32) -> bool {
-      pact_mock_server::mock_server_matched(port)
-    }
-
-    def mock_server_mismatches(port: i32) -> Option<Vec<String>> {
-      pact_mock_server::lookup_mock_server_by_port(port, &|mock_server| {
-        mock_server.mismatches().iter()
-          .map(|mismatch| mismatch.to_json().to_string())
-          .collect()
-      })
-    }
-  }
-}
-
-#[cfg(test)]
-mod tests {
-  #[test]
-  fn it_works() {
-    assert_eq!(2 + 2, 4);
-  }
-}
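
For context, the `ruby!` block in the removed `lib.rs` is a thin wrapper over the `pact_mock_server` 0.5-era crate pinned in the Cargo.toml earlier in this patch; the whole binding is only 60 lines. The sketch below is not part of the patch. It illustrates the same start / verify / shutdown lifecycle driven directly from Rust, using only the crate calls that appear in the removed `lib.rs`. The `main` harness, the fixture filename, the use of port 0, and the error plumbing are illustrative assumptions, and these old 0.5-era signatures may differ from later releases of the crates.

```rust
// Illustrative sketch only (assumed pact_matching 0.5 / pact_mock_server 0.5 APIs,
// as used by the removed helix binding above). Not part of the original patch.
extern crate pact_matching;
extern crate pact_mock_server;
extern crate serde_json;
extern crate uuid;

use pact_matching::models::Pact;
use uuid::Uuid;

fn main() -> Result<(), String> {
    // Hypothetical pact file path, purely for illustration
    let pact_json = std::fs::read_to_string("simple_pact.json")
        .map_err(|err| format!("Could not read pact file: {}", err))?;
    let json: serde_json::Value = serde_json::from_str(&pact_json)
        .map_err(|err| format!("Could not parse pact json: {}", err))?;
    let pact = Pact::from_json(&"simple_pact.json".to_string(), &json);

    // Port 0 should let the mock server pick a free port; the bound port is returned
    let port = pact_mock_server::start_mock_server(
        Uuid::new_v4().simple().to_string(), pact, 0)?;
    println!("mock server running on port {}", port);

    // ... exercise the consumer against http://localhost:<port> here ...

    if !pact_mock_server::mock_server_matched(port) {
        // Collect mismatches as JSON strings, mirroring mock_server_mismatches above
        let mismatches = pact_mock_server::lookup_mock_server_by_port(port, &|mock_server| {
            mock_server.mismatches().iter()
                .map(|mismatch| mismatch.to_json().to_string())
                .collect::<Vec<String>>()
        });
        println!("mismatches: {:?}", mismatches);
    }

    pact_mock_server::shutdown_mock_server_by_port(port);
    Ok(())
}
```

The removed gem simply surfaced these same four operations (`create_mock_server`, `all_matched`, `mock_server_mismatches`, `cleanup_mock_server`) as Ruby methods via helix; everything else in the directory was standard bundler scaffolding.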