[DRE-commits] [ruby-elasticsearch] 01/03: Imported Upstream version 2.0.0
Macártur Carvalho
macartur-guest at moszumanska.debian.org
Mon Aug 8 22:33:52 UTC 2016
This is an automated email from the git hooks/post-receive script.
macartur-guest pushed a commit to branch master
in repository ruby-elasticsearch.
commit abf43767800ee7ffb6b825684fbb4f3f44dc616f
Author: Macártur Carvalho <macartur.sc at gmail.com>
Date: Mon Aug 8 19:22:33 2016 -0300
Imported Upstream version 2.0.0
---
.travis.yml | 20 +-
CHANGELOG.md | 171 +++++
Gemfile | 6 +-
README.md | 34 +-
Rakefile | 73 ++-
elasticsearch-api/Gemfile | 6 +-
elasticsearch-api/README.md | 20 +-
elasticsearch-api/Rakefile | 8 +-
elasticsearch-api/elasticsearch-api.gemspec | 7 +-
elasticsearch-api/lib/elasticsearch/api.rb | 24 +-
.../elasticsearch/api/actions/abort_benchmark.rb | 2 -
.../lib/elasticsearch/api/actions/bulk.rb | 21 +-
.../lib/elasticsearch/api/actions/cat/aliases.rb | 3 +
.../elasticsearch/api/actions/cat/allocation.rb | 3 +
.../lib/elasticsearch/api/actions/cat/count.rb | 3 +
.../lib/elasticsearch/api/actions/cat/fielddata.rb | 3 +
.../lib/elasticsearch/api/actions/cat/health.rb | 3 +
.../lib/elasticsearch/api/actions/cat/indices.rb | 3 +
.../lib/elasticsearch/api/actions/cat/master.rb | 3 +
.../api/actions/cat/{plugins.rb => nodeattrs.rb} | 17 +-
.../lib/elasticsearch/api/actions/cat/nodes.rb | 5 +-
.../elasticsearch/api/actions/cat/pending_tasks.rb | 3 +
.../lib/elasticsearch/api/actions/cat/plugins.rb | 8 +-
.../lib/elasticsearch/api/actions/cat/recovery.rb | 3 +
.../actions/cat/{plugins.rb => repositories.rb} | 25 +-
.../lib/elasticsearch/api/actions/cat/segments.rb | 8 +-
.../lib/elasticsearch/api/actions/cat/shards.rb | 8 +
.../lib/elasticsearch/api/actions/cat/snapshots.rb | 48 ++
.../lib/elasticsearch/api/actions/cat/tasks.rb | 45 ++
.../elasticsearch/api/actions/cat/thread_pool.rb | 6 +
.../api/actions/cluster/allocation_explain.rb | 28 +
.../api/actions/cluster/get_settings.rb | 8 +-
.../elasticsearch/api/actions/cluster/health.rb | 5 +-
.../api/actions/cluster/pending_tasks.rb | 2 +-
.../elasticsearch/api/actions/cluster/reroute.rb | 7 +-
.../lib/elasticsearch/api/actions/cluster/stats.rb | 30 +
.../elasticsearch/api/actions/delete_by_query.rb | 2 +
.../lib/elasticsearch/api/actions/field_stats.rb | 4 +-
.../lib/elasticsearch/api/actions/index.rb | 4 +
.../elasticsearch/api/actions/indices/analyze.rb | 5 +
.../api/actions/indices/clear_cache.rb | 4 +-
.../lib/elasticsearch/api/actions/indices/close.rb | 3 +-
.../elasticsearch/api/actions/indices/create.rb | 9 +-
.../api/actions/indices/exists_type.rb | 2 +-
.../api/actions/indices/flush_synced.rb | 35 +
.../api/actions/indices/forcemerge.rb | 62 ++
.../lib/elasticsearch/api/actions/indices/get.rb | 14 +-
.../api/actions/indices/get_field_mapping.rb | 8 +-
.../api/actions/indices/get_settings.rb | 5 +
.../api/actions/indices/get_warmer.rb | 5 +-
.../lib/elasticsearch/api/actions/indices/open.rb | 3 +-
.../elasticsearch/api/actions/indices/optimize.rb | 2 +
.../api/actions/indices/put_mapping.rb | 5 +-
.../api/actions/indices/put_settings.rb | 9 +
.../lib/elasticsearch/api/actions/indices/seal.rb | 2 -
.../elasticsearch/api/actions/indices/segments.rb | 14 +-
.../api/actions/indices/shard_stores.rb | 34 +
.../elasticsearch/api/actions/indices/status.rb | 6 +-
.../api/actions/ingest/delete_pipeline.rb | 33 +
.../api/actions/ingest/get_pipeline.rb | 31 +
.../api/actions/ingest/put_pipeline.rb | 36 ++
.../elasticsearch/api/actions/ingest/simulate.rb | 32 +
.../elasticsearch/api/actions/list_benchmarks.rb | 2 -
.../lib/elasticsearch/api/actions/mpercolate.rb | 5 +-
.../lib/elasticsearch/api/actions/msearch.rb | 8 +-
.../elasticsearch/api/actions/nodes/hot_threads.rb | 4 +-
.../lib/elasticsearch/api/actions/nodes/info.rb | 6 +-
.../lib/elasticsearch/api/actions/nodes/stats.rb | 4 +-
.../lib/elasticsearch/api/actions/percolate.rb | 2 +-
.../lib/elasticsearch/api/actions/ping.rb | 8 +-
.../lib/elasticsearch/api/actions/reindex.rb | 69 ++
.../api/actions/render_search_template.rb | 25 +
.../lib/elasticsearch/api/actions/search.rb | 21 +-
.../elasticsearch/api/actions/snapshot/delete.rb | 7 +-
.../api/actions/snapshot/delete_repository.rb | 7 +-
.../lib/elasticsearch/api/actions/snapshot/get.rb | 7 +-
.../api/actions/snapshot/get_repository.rb | 7 +-
.../elasticsearch/api/actions/snapshot/status.rb | 7 +-
.../lib/elasticsearch/api/actions/tasks/cancel.rb | 40 ++
.../lib/elasticsearch/api/actions/tasks/list.rb | 50 ++
.../elasticsearch/api/actions/update_by_query.rb | 131 ++++
.../lib/elasticsearch/api/namespace/ingest.rb | 20 +
.../lib/elasticsearch/api/namespace/tasks.rb | 20 +
elasticsearch-api/lib/elasticsearch/api/utils.rb | 105 ++-
elasticsearch-api/lib/elasticsearch/api/version.rb | 2 +-
.../test/integration/yaml_test_runner.rb | 74 ++-
elasticsearch-api/test/unit/api_test.rb | 4 +
elasticsearch-api/test/unit/bulk_test.rb | 43 +-
.../cat/{plugins_test.rb => nodeattrs_test.rb} | 8 +-
elasticsearch-api/test/unit/cat/plugins_test.rb | 2 +-
.../cat/{plugins_test.rb => repositories_test.rb} | 8 +-
.../cat/{plugins_test.rb => snapshots_test.rb} | 8 +-
.../unit/cat/{plugins_test.rb => tasks_test.rb} | 8 +-
...ng_tasks_test.rb => allocation_explain_test.rb} | 10 +-
elasticsearch-api/test/unit/cluster/health_test.rb | 9 +
.../test/unit/cluster/pending_tasks_test.rb | 2 +-
.../{cat/plugins_test.rb => cluster/stats_test.rb} | 8 +-
.../test/unit/indices/flush_synced_test.rb | 41 ++
.../plugins_test.rb => indices/forcemerge_test.rb} | 10 +-
.../shard_stores_test.rb} | 8 +-
elasticsearch-api/test/unit/indices/status_test.rb | 8 +
.../test/unit/ingest/delete_pipeline_test.rb | 41 ++
.../health_test.rb => ingest/get_pipeline_test.rb} | 23 +-
.../test/unit/ingest/put_pipeline_test.rb | 46 ++
.../health_test.rb => ingest/simulate_test.rb} | 19 +-
elasticsearch-api/test/unit/percolate_test.rb | 9 +
elasticsearch-api/test/unit/ping_test.rb | 7 +-
.../unit/{cat/plugins_test.rb => reindex_test.rb} | 12 +-
...gins_test.rb => render_search_template_test.rb} | 13 +-
elasticsearch-api/test/unit/search_test.rb | 8 +
.../health_test.rb => tasks/cancel_test.rb} | 20 +-
.../{cluster/health_test.rb => tasks/list_test.rb} | 16 +-
...nding_tasks_test.rb => update_by_query_test.rb} | 12 +-
elasticsearch-api/test/unit/utils_test.rb | 89 ++-
elasticsearch-dsl/README.md | 39 +-
elasticsearch-dsl/elasticsearch-dsl.gemspec | 6 +-
elasticsearch-dsl/lib/elasticsearch/dsl/search.rb | 4 +-
.../lib/elasticsearch/dsl/search/aggregation.rb | 2 +-
.../dsl/search/aggregations/global.rb | 10 +-
.../dsl/search/aggregations/missing.rb | 36 ++
.../dsl/search/aggregations/pipeline/avg_bucket.rb | 34 +
.../search/aggregations/pipeline/bucket_script.rb | 36 ++
.../aggregations/pipeline/bucket_selector.rb | 35 +
.../search/aggregations/pipeline/cumulative_sum.rb | 33 +
.../dsl/search/aggregations/pipeline/derivative.rb | 34 +
.../aggregations/pipeline/extended_stats_bucket.rb | 34 +
.../dsl/search/aggregations/pipeline/max_bucket.rb | 34 +
.../dsl/search/aggregations/pipeline/min_bucket.rb | 34 +
.../dsl/search/aggregations/pipeline/moving_avg.rb | 42 ++
.../aggregations/pipeline/percentiles_bucket.rb | 36 ++
.../search/aggregations/pipeline/serial_diff.rb | 36 ++
.../search/aggregations/pipeline/stats_bucket.rb | 34 +
.../dsl/search/aggregations/pipeline/sum_bucket.rb | 34 +
.../elasticsearch/dsl/search/aggregations/stats.rb | 17 +-
.../elasticsearch/dsl/search/aggregations/terms.rb | 14 +-
.../lib/elasticsearch/dsl/search/filters/range.rb | 1 +
.../lib/elasticsearch/dsl/search/queries/bool.rb | 10 +
.../lib/elasticsearch/dsl/search/queries/match.rb | 1 +
.../lib/elasticsearch/dsl/search/queries/range.rb | 1 +
.../lib/elasticsearch/dsl/search/sort.rb | 13 +-
elasticsearch-dsl/lib/elasticsearch/dsl/version.rb | 2 +-
.../integration/search_aggregation_geo_test.rb | 2 +-
.../test/integration/search_aggregations_test.rb | 25 +-
elasticsearch-dsl/test/integration/search_test.rb | 60 ++
.../test/unit/aggregations/global_test.rb | 20 +-
.../test/unit/aggregations/missing_test.rb | 39 ++
.../unit/aggregations/pipeline/avg_bucket_test.rb | 39 ++
.../aggregations/pipeline/bucket_script_test.rb | 39 ++
.../aggregations/pipeline/bucket_selector_test.rb | 38 ++
.../aggregations/pipeline/cumulative_sum_test.rb | 37 ++
.../unit/aggregations/pipeline/derivative_test.rb | 39 ++
.../pipeline/extended_stats_bucket_test.rb | 38 ++
.../unit/aggregations/pipeline/max_bucket_test.rb | 38 ++
.../unit/aggregations/pipeline/min_bucket_test.rb | 38 ++
.../unit/aggregations/pipeline/moving_avg_test.rb | 41 ++
.../pipeline/percentiles_bucket_test.rb | 39 ++
.../unit/aggregations/pipeline/serial_diff_test.rb | 39 ++
.../aggregations/pipeline/stats_bucket_test.rb | 38 ++
.../unit/aggregations/pipeline/sum_bucket_test.rb | 38 ++
.../test/unit/aggregations/stats_test.rb | 8 +
elasticsearch-dsl/test/unit/filters/range_test.rb | 3 +-
elasticsearch-dsl/test/unit/queries/bool_test.rb | 9 +
elasticsearch-dsl/test/unit/queries/match_test.rb | 11 +-
elasticsearch-dsl/test/unit/queries/range_test.rb | 6 +-
elasticsearch-dsl/test/unit/search_sort_test.rb | 25 +
elasticsearch-dsl/test/unit/search_test.rb | 34 +
elasticsearch-extensions/README.md | 82 +++
elasticsearch-extensions/Rakefile | 2 +-
.../elasticsearch-extensions.gemspec | 7 +-
.../lib/elasticsearch/extensions.rb | 2 +
.../lib/elasticsearch/extensions/backup.rb | 43 +-
.../lib/elasticsearch/extensions/reindex.rb | 160 +++++
.../lib/elasticsearch/extensions/test/cluster.rb | 716 ++++++++++++++-------
.../lib/elasticsearch/extensions/version.rb | 2 +-
.../test/reindex/integration/reindex_test.rb | 81 +++
.../test/reindex/unit/reindex_test.rb | 106 +++
.../test/test/cluster/integration/cluster_test.rb | 29 +
.../test/test/cluster/unit/cluster_test.rb | 280 ++++++++
elasticsearch-extensions/test/test_helper.rb | 14 +-
elasticsearch-transport/Gemfile | 6 +-
elasticsearch-transport/README.md | 90 ++-
.../elasticsearch-transport.gemspec | 10 +-
.../lib/elasticsearch/transport.rb | 4 +-
.../lib/elasticsearch/transport/client.rb | 84 ++-
.../lib/elasticsearch/transport/transport/base.rb | 130 +++-
.../transport/transport/connections/collection.rb | 20 +
.../transport/transport/connections/connection.rb | 34 +-
.../transport/transport/connections/selector.rb | 2 +-
.../elasticsearch/transport/transport/http/curb.rb | 34 +-
.../transport/transport/http/faraday.rb | 20 +-
.../transport/transport/http/manticore.rb | 30 +-
.../elasticsearch/transport/transport/sniffer.rb | 16 +-
.../lib/elasticsearch/transport/version.rb | 2 +-
.../test/integration/client_test.rb | 80 ++-
.../test/integration/transport_test.rb | 4 +
elasticsearch-transport/test/unit/client_test.rb | 102 ++-
.../test/unit/connection_collection_test.rb | 46 +-
.../test/unit/connection_test.rb | 9 +
elasticsearch-transport/test/unit/sniffer_test.rb | 56 +-
.../test/unit/transport_base_test.rb | 130 +++-
.../test/unit/transport_curb_test.rb | 7 +
.../test/unit/transport_faraday_test.rb | 44 ++
.../test/unit/transport_manticore_test.rb | 6 +
.../elasticsearch-watcher.gemspec | 2 +-
elasticsearch/elasticsearch.gemspec | 11 +-
elasticsearch/lib/elasticsearch/version.rb | 2 +-
.../test/integration/client_integration_test.rb | 4 +
207 files changed, 5308 insertions(+), 717 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 6186952..2fb829e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,8 +2,12 @@
# Configuration file for http://travis-ci.org/#!/elasticsearch/elasticsearch-ruby
# -------------------------------------------------------------------------------
+sudo: false
+
language: ruby
+cache: bundler
+
branches:
only:
- master
@@ -20,29 +24,37 @@ jdk:
env:
- TEST_SUITE=unit
- - TEST_SUITE=integration
+ - TEST_SUITE=integration ES_VERSION=1.7.5
+ - TEST_SUITE=integration ES_VERSION=2.3.0
before_install:
- gem update --system
- gem --version
install:
- - test $TEST_SUITE == 'integration' && curl -# https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.0.0.tar.gz | tar xz -C /tmp || true
+ - test $TEST_SUITE == 'integration' && curl -s https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.7.5.tar.gz | tar xz -C /tmp || true
+ - test $TEST_SUITE == 'integration' && curl -s https://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/2.3.0/elasticsearch-2.3.0.tar.gz | tar xz -C /tmp || true
before_script:
+ - gem install bundler -v 1.11.2
- rake setup
- rake elasticsearch:update
- rake bundle:clean
- rake bundle:install
script:
- - SERVER=start TEST_BUILD_REF=tags/v1.0.0 TEST_CLUSTER_COMMAND=/tmp/elasticsearch-1.0.0/bin/elasticsearch rake test:$TEST_SUITE
+ - SERVER=start TEST_CLUSTER_LOGS=/tmp/log TEST_BUILD_REF=tags/v$ES_VERSION TEST_CLUSTER_COMMAND=/tmp/elasticsearch-$ES_VERSION/bin/elasticsearch rake test:$TEST_SUITE
matrix:
exclude:
- rvm: 1.8.7
jdk: openjdk7
- env: TEST_SUITE=integration
+ env: TEST_SUITE=integration ES_VERSION=1.7.5
+ - rvm: 1.8.7
+ jdk: openjdk7
+ env: TEST_SUITE=integration ES_VERSION=2.3.0
notifications:
disable: true
+
+sudo: false
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9e87a92..e2691a2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,174 @@
+## 2.0.0
+
+* Added deprecation notices to API methods and parameters not supported on Elasticsearch 2.x
+
+## DSL:0.1.4
+
+* Added correct implementation of `Sort#empty?`
+* Added the `filter` method to the Bool query
+* Added the pipeline aggregations
+* Allowed access to calling context from search block
+
+## EXT:0.0.22
+
+* Refactored and significantly improved the "Reindex" extension
+* Refactored and improved the `Extensions::Test::Cluster` extension
+
+## 1.0.18
+
+* Fixed the incorrect Rake dependency on Ruby 1.8 and updated the Rake dependency to 11.1
+* Simplified the main README and added the information about the DSL and Watcher libraries
+
+### API
+
+* Added `ignore: 404` to integration test setup blocks
+* Added options to the "Indices Get" and "Indices Flush Synced" APIs
+* Added the "Cat Tasks", "Cluster Stats", "Explain allocation", "Ingest", "Reindex" and "Update By Query" APIs
+* Added the `:terminate_after` parameter to the "Search" API
+* Added the `:timeout` option to the Nodes "Hot Threads", "Info" and "Stats" APIs
+* Added the `:timeout` parameter to the Nodes "Hot Threads", "Info" and "Stats" APIs
+* Added the `:verbose` option to the "Indices Segments" API and fixed formatting
+* Added the `explain` option to the "Analyze" API
+* Added the `filter` parameter for the "Indices Analyze" API
+* Added the `group_by` option to the "Tasks List" API
+* Added the `include_defaults` option to the "Get Cluster Settings" API
+* Added the `include_defaults` parameter to the "Indices" APIs
+* Added the `preserve_existing` option to the "Indices Put Settings" API
+* Added the `request_cache` parameter to the "Search" API
+* Added the `retry_failed` option to the "Cluster Reroute" API
+* Added the `size` parameter to the "Cat Thread Pool" API
+* Added the `update_all_types` parameter to "Indices Create" and "Indices Put Mapping" APIs
+* Added the parameters for ingest nodes into the "Bulk" and "Index" APIs
+* Fixes and improvements of handling the API method parameters
+* Changed, that the "Ping" API returns false also on connection errors (server "not reachable")
+* Added a `Utils.__report_unsupported_method` and `Utils.__report_unsupported_parameters` methods
+
+### Client
+
+* Fixed, that the client tries to deserialize an empty body
+* Fixed, that dead connections have not been removed during reloading, leading to leaks
+
+## EXT:0.0.21
+
+* Improved the documentation for the "Backup" extension and added it to the main README
+* Added the information about the "Reindex" extension to the README
+* Added a reindex extension
+* Improved the `Elasticsearch::Extensions::Test::Cluster` extension
+
+## 1.0.17
+
+### Client
+
+* Fixed, that existing connections are not re-initialized during reloading ("sniffing")
+
+## 1.0.16
+
+* Added notes about ES 2.x compatibility
+* Fixes and updates to the Travis CI configuration
+* Updated the `elasticsearch:build` Rake task
+
+### API
+
+* Added the ability to set a custom JSON serializer
+* Added, that `fields` and `fielddata_fields` in the Search API are not escaped
+* Fixed the incorrect handling of `:data` keys in the Utils#__bulkify method
+* Added fixes to suppress warnings in the verbose mode
+* Added support for new Cat API calls
+
+### Client
+
+* Added, that username and password is automatically escaped in the URL
+* Changed, that the password is replaced with `*` characters in the log
+* Bumped the "manticore" gem dependency to 0.5
+* Improved the thread-safety of reloading connections
+* Improved the Manticore HTTP client
+* Fixed, that connections are reloaded _before_ getting a connection
+* Added a better interface for configuring global HTTP settings such as protocol or authentication
+
+## DSL:0.1.3
+
+* Changed, that `global` aggregation takes a block
+* Updated the README example to work with Elasticsearch 2.x
+* Improved the documentation and integration tests for inner (nested) aggregation
+* Added the option method `field` and `script` to the "stats" aggregation
+
+## EXT:0.0.20
+
+* Fixed the implementation of keeping the test cluster data and state around between restarts
+
+## 1.0.15
+
+* Updated the Travis CI configuration
+
+### API
+
+* Added `bytes` as a valid parameter to "Shards" and "Segments" Cat API
+* Added support for the `local` argument in the "Get Warmer" API
+* Added support for `fields` argument in the "Get Field Mapping" API
+* Fixed an error in the YAML runner handling of ENV['TEST_CLUSTER_PARAMS']
+* Validate and extract params from indices.get_warmer arguments
+
+### Client
+
+* Added the option to configure the Faraday adapter using a block and the relevant documentation
+* Added information about configuring the client for the Amazon Elasticsearch Service
+* Added the `retry_on_status` option to retry on specific HTTP response statuses
+* Changed, that transports can close connections during `__rebuild_connections`
+* Added, that the Manticore adapter closes connections during reload ("sniffing")
+
+## 1.0.14
+
+* Clarified version support of Elasticsearch
+* Improved the `elasticsearch:build` Rake task to work with branch names
+
+### API
+
+* Added support for the `:ignore` option to the "Snapshot and Restore" API
+* Added support for the `:ignore` option to the Status API
+* Added the "Cat Nodeattrs" API
+* Added the "fields" parameter to the Bulk API
+* Added the "Render Search Template" API
+* Added the "Shard Stores" API
+* Added, that document ID is URL-escaped when percolating an existing document
+* Allow passing TEST_CLUSTER_PARAMS to the test cluster
+* Define the path to core REST tests dynamically based on Elasticsearch version
+* Fixed example in "Get Warmer" API
+* Fixed incorrect documentation and link in the "Clear Cache" API
+* Fixed integration tests for the "Snapshot and Restore" API
+* Fixed the incorrect path in "Field Stats" API and added support for the `body` argument
+* Fixed, that `type` is not added both to path and URL parameters in the Bulk API
+* Updated the examples in README and documentation (facets -> aggregations)
+
+### Client
+
+* Added an argument to control clearing out the testing cluster
+* Fixed, that reloading connections works with SSL, authentication and proxy/Shield
+* Highlight the need to set `retry_on_failure` option with multiple hosts in documentation
+
+## DSL:0.1.2
+
+* Added fuzziness option to the "Match" query
+* Added the `format` option to range filter and query
+* Added, that `*args` are passed to the Options initializer
+
+## EXT:0.0.19
+
+* Added `es.path.repo` to the testing cluster
+* Added `path_logs` option to test cluster
+* Added the `testattr` attribute to the testing cluster
+* Changed the default network host for the testing cluster to "localhost", to enable new "multicast"
+
+## 1.0.13
+
+### Client
+
+* Added, that connection reloading supports Elasticsearch 2.0 output
+* Improved thread safety in parts of connection handling code
+
+## DSL:1.0.1
+
+* Added additional option methods to the "Multi Match" query
+
## 1.0.12
### API
diff --git a/Gemfile b/Gemfile
index e496e18..ea36edf 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,7 +1,11 @@
source 'https://rubygems.org'
gem "bundler", "> 1"
-gem "rake"
+if defined?(RUBY_VERSION) && RUBY_VERSION > '1.9'
+ gem "rake", "~> 11.1"
+else
+ gem "rake", "< 11.0"
+end
gem 'elasticsearch-api', :path => File.expand_path("../elasticsearch-api", __FILE__), :require => false
gem 'elasticsearch-transport', :path => File.expand_path("../elasticsearch-transport", __FILE__), :require => false
diff --git a/README.md b/README.md
index c565b7f..3203dbc 100644
--- a/README.md
+++ b/README.md
@@ -13,9 +13,9 @@ see the <https://github.com/elasticsearch/elasticsearch-rails> project.
The libraries are compatible with Ruby 1.8.7 and higher.
-The library is compatible with Elasticsearch 0.90 and 1.0 -- you have to install and use a matching version, though.
+The library is compatible with Elasticsearch 0.90, 1.x and 2.x -- you have to install and use a matching version, though.
-The 1.x versions and the master branch are compatible with **Elasticsearch 1.x** API.
+The 1.x versions and the master branch are compatible with Elasticsearch 1.x and 2.x APIs.
To use the **Elasticsearch 0.90** API, install the **0.4.x** gem version or use the corresponding
[`0.4`](https://github.com/elasticsearch/elasticsearch-ruby/tree/0.4) branch.
@@ -65,27 +65,20 @@ Both of these libraries are extensively documented.
**Please read the [`elasticsearch-transport`](http://rubydoc.info/gems/elasticsearch-transport)
and the [`elasticsearch-api`](http://rubydoc.info/gems/elasticsearch-api) documentation carefully.**
-Keep in mind, that for optimal performance, you should use a HTTP library which supports persistent
+_Keep in mind, that for optimal performance, you should use a HTTP library which supports persistent
("keep-alive") connections, e.g. [Patron](https://github.com/toland/patron) or
-[Typhoeus](https://github.com/typhoeus/typhoeus).
+[Typhoeus](https://github.com/typhoeus/typhoeus)._
-### Transport
+This repository contains these additional Ruby libraries:
-* [README](https://github.com/elasticsearch/elasticsearch-ruby/blob/master/elasticsearch-transport/README.md)
-* [Documentation](http://rubydoc.info/gems/elasticsearch-transport)
-* [Test Suite](https://github.com/elasticsearch/elasticsearch-ruby/blob/master/elasticsearch-transport/test)
+* [`elasticsearch-extensions`](https://github.com/elastic/elasticsearch-ruby/tree/master/elasticsearch-extensions),
+ which provides a set of extensions to the base library,
+* [`elasticsearch-dsl`](https://github.com/elastic/elasticsearch-ruby/tree/master/elasticsearch-dsl),
+ which provides a Ruby API for the [Elasticsearch Query DSL](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl.html),
+* [`elasticsearch-watcher`](https://github.com/elastic/elasticsearch-ruby/tree/master/elasticsearch-watcher),
+ which provides Ruby API for the [_Watcher_](https://www.elastic.co/products/watcher) plugin.
-### API
-
-* [README](https://github.com/elasticsearch/elasticsearch-ruby/blob/master/elasticsearch-api/README.md)
-* [Documentation](http://rubydoc.info/gems/elasticsearch-api/)
-* [Test Suite](https://github.com/elasticsearch/elasticsearch-ruby/blob/master/elasticsearch-api/test)
-
-### Extensions
-
-* [README](https://github.com/elasticsearch/elasticsearch-ruby/blob/master/elasticsearch-extensions/README.md)
-* [Documentation](http://rubydoc.info/gems/elasticsearch-extensions/)
-* [Test Suite](https://github.com/elasticsearch/elasticsearch-ruby/blob/master/elasticsearch-extensions/test)
+Please see their respective READMEs for information and documentation.
## Development
@@ -128,6 +121,9 @@ To run tests against unreleased Elasticsearch versions, you can use the `rake el
Rake task to build Elasticsearch from the cloned source
(use `rake elasticsearch:update` to update the repository):
+**Note:** If you have gems from the `elasticsearch` family installed system-wide,
+ and want to use development ones, prepend the command with `bundle exec`.
+
```
rake elasticsearch:build
```
diff --git a/Rakefile b/Rakefile
index 04d1a27..ce4007e 100644
--- a/Rakefile
+++ b/Rakefile
@@ -84,22 +84,68 @@ namespace :elasticsearch do
Rake::Task['elasticsearch:status'].invoke
puts '-'*80
- branch = args[:branch] || 'origin/master'
+ gitref = args[:branch] || 'origin/master'
+ es_version = gitref.gsub(/^v|origin\/(\d\.+)/, '\1').to_f
+
current_branch = `git --git-dir=#{__current__.join('tmp/elasticsearch/.git')} --work-tree=#{__current__.join('tmp/elasticsearch')} branch --no-color`.split("\n").select { |b| b =~ /^\*/ }.first.gsub(/^\*\s*/, '')
- begin
- sh <<-CODE
- mkdir -p #{__current__.join('tmp/builds')};
- rm -rf '#{__current__.join('tmp/elasticsearch/target')}';
- cd #{__current__.join('tmp/elasticsearch')} && git fetch origin --quiet;
- cd #{__current__.join('tmp/elasticsearch')} && git checkout #{branch};
- cd #{__current__.join('tmp/elasticsearch')} && mvn clean package -DskipTests;
- build=`ls #{__current__.join('tmp/elasticsearch/target/releases/elasticsearch-*.tar.gz')} | xargs -0 basename | sed s/\.tar\.gz//` && \
- rm -rf "#{__current__.join('tmp/builds')}/$build";
- tar xvf target/releases/elasticsearch-*.tar.gz -C #{__current__.join('tmp/builds')};
- echo; echo; echo "Built: $build"
- CODE
+
+ STDOUT.puts "Building version [#{es_version}] from [#{gitref}]:", ""
+
+ case es_version
+ when 0.0, 5..1000
+ path_to_build = __current__.join('tmp/elasticsearch/distribution/tar/build/distributions/elasticsearch-*.tar.gz')
+ build_command = "cd #{__current__.join('tmp/elasticsearch/distribution/tar')} && gradle clean assemble;"
+ extract_command = <<-CODE.gsub(/ /, '')
+ build=`ls #{path_to_build} | xargs -0 basename | sed s/\.tar\.gz//`
+ if [[ $build ]]; then
+ rm -rf "#{__current__.join('tmp/builds')}/$build";
+ else
+ echo "Cannot determine build, exiting..."
+ exit 1
+ fi
+ tar xvf #{path_to_build} -C #{__current__.join('tmp/builds')};
+ CODE
+ when 1.8..4
+ path_to_build = __current__.join('tmp/elasticsearch/distribution/tar/target/releases/elasticsearch-*.tar.gz')
+ build_command = "cd #{__current__.join('tmp/elasticsearch')} && MAVEN_OPTS=-Xmx1g mvn --projects core,distribution/tar clean package -DskipTests -Dskip.integ.tests;"
+ extract_command = <<-CODE.gsub(/ /, '')
+ build=`ls #{path_to_build} | xargs -0 basename | sed s/\.tar\.gz//`
+ if [[ $build ]]; then
+ rm -rf "#{__current__.join('tmp/builds')}/$build";
+ else
+ echo "Cannot determine build, exiting..."
+ exit 1
+ fi
+ tar xvf #{path_to_build} -C #{__current__.join('tmp/builds')};
+ CODE
+ when 0.1..1.7
+ path_to_build = __current__.join('tmp/elasticsearch/target/releases/elasticsearch-*.tar.gz')
+ build_command = "cd #{__current__.join('tmp/elasticsearch')} && MAVEN_OPTS=-Xmx1g mvn clean package -DskipTests"
+ extract_command = <<-CODE.gsub(/ /, '')
+ build=`ls #{path_to_build} | xargs -0 basename | sed s/\.tar\.gz//`
+ if [[ $build ]]; then
+ rm -rf "#{__current__.join('tmp/builds')}/$build";
+ else
+ echo "Cannot determine build, exiting..."
+ exit 1
+ fi
+ tar xvf #{path_to_build} -C #{__current__.join('tmp/builds')};
+ CODE
+ else
+ STDERR.puts "", "[!] Cannot determine a compatible version of the build (gitref: #{gitref}, es_version: #{es_version})"
+ exit(1)
end
+ sh <<-CODE.gsub(/ /, '')
+ mkdir -p #{__current__.join('tmp/builds')};
+ rm -rf '#{__current__.join('tmp/elasticsearch/distribution/tar/target/')}';
+ cd #{__current__.join('tmp/elasticsearch')} && git fetch origin --quiet;
+ cd #{__current__.join('tmp/elasticsearch')} && git checkout #{gitref};
+ #{build_command}
+ #{extract_command}
+ echo; echo; echo "Built: $build"
+ CODE
+
puts "", '-'*80, ""
Rake::Task['elasticsearch:builds'].invoke
end
@@ -111,6 +157,7 @@ namespace :elasticsearch do
desc "Display the list of builds"
task :builds do
+ system "mkdir -p #{__current__.join('tmp/builds')};"
puts "Builds:"
Dir.entries(__current__.join('tmp/builds')).reject { |f| f =~ /^\./ }.each do |build|
puts "* #{build}"
diff --git a/elasticsearch-api/Gemfile b/elasticsearch-api/Gemfile
index 899446d..a015200 100644
--- a/elasticsearch-api/Gemfile
+++ b/elasticsearch-api/Gemfile
@@ -3,14 +3,14 @@ source 'https://rubygems.org'
# Specify your gem's dependencies in elasticsearch-api.gemspec
gemspec
-if File.exists? File.expand_path("../../elasticsearch/elasticsearch.gemspec", __FILE__)
+if File.exist? File.expand_path("../../elasticsearch/elasticsearch.gemspec", __FILE__)
gem 'elasticsearch', :path => File.expand_path("../../elasticsearch", __FILE__), :require => false
end
-if File.exists? File.expand_path("../../elasticsearch-transport", __FILE__)
+if File.exist? File.expand_path("../../elasticsearch-transport", __FILE__)
gem 'elasticsearch-transport', :path => File.expand_path("../../elasticsearch-transport", __FILE__), :require => true
end
-if File.exists? File.expand_path("../../elasticsearch-extensions", __FILE__)
+if File.exist? File.expand_path("../../elasticsearch-extensions", __FILE__)
gem 'elasticsearch-extensions', :path => File.expand_path("../../elasticsearch-extensions", __FILE__), :require => false
end
diff --git a/elasticsearch-api/README.md b/elasticsearch-api/README.md
index 2ac3625..a7df8c0 100644
--- a/elasticsearch-api/README.md
+++ b/elasticsearch-api/README.md
@@ -14,9 +14,9 @@ library.
The library is compatible with Ruby 1.8.7 or higher.
-The library is compatible with Elasticsearch 0.90 and 1.0 -- you have to install and use a matching version, though.
+The library is compatible with Elasticsearch 0.90, 1.x and 2.x -- you have to install and use a matching version, though.
-The 1.x versions and the master branch are compatible with **Elasticsearch 1.x** API.
+The 1.x versions and the master branch are compatible with Elasticsearch 1.x and 2.x APIs.
To use the **Elasticsearch 0.90** API, install the **0.4.x** gem version or use the corresponding
[`0.4`](https://github.com/elasticsearch/elasticsearch-ruby/tree/0.4) branch.
@@ -143,7 +143,7 @@ require 'hashie'
response = client.search index: 'myindex',
body: {
query: { match: { title: 'test' } },
- facets: { tags: { terms: { field: 'tags' } } }
+ aggregations: { tags: { terms: { field: 'tags' } } }
}
mash = Hashie::Mash.new response
@@ -151,10 +151,22 @@ mash = Hashie::Mash.new response
mash.hits.hits.first._source.title
# => 'Test'
-mash.facets.tags.terms.first
+mash.aggregations.tags.terms.first
# => #<Hashie::Mash count=3 term="z">
```
+### Using a Custom JSON Serializer
+
+The library uses the [MultiJson](https://rubygems.org/gems/multi_json/) gem by default,
+but allows you to set a custom JSON library, provided it uses the standard `load/dump`
+interface:
+
+```ruby
+Elasticsearch::API.settings[:serializer] = JrJackson::Json
+Elasticsearch::API.serializer.dump({foo: 'bar'})
+# => {"foo":"bar"}
+```
+
## Development
To work on the code, clone and bootstrap the main repository first --
diff --git a/elasticsearch-api/Rakefile b/elasticsearch-api/Rakefile
index b249ed8..8781335 100644
--- a/elasticsearch-api/Rakefile
+++ b/elasticsearch-api/Rakefile
@@ -67,11 +67,12 @@ namespace :test do
# Check if a test cluster is running
begin
- client = Elasticsearch::Client.new :host => "localhost:#{ENV['TEST_CLUSTER_PORT'] || 9250}"
+ url = ENV.fetch('TEST_CLUSTER_URL', "http://localhost:#{ENV['TEST_CLUSTER_PORT'] || 9250}")
+ client = Elasticsearch::Client.new :url => url
es_version_info = client.info['version']
build_hash = es_version_info['build_hash']
cluster_running = true
- rescue Faraday::Error::ConnectionFailed => e
+ rescue Faraday::Error::ConnectionFailed
STDERR.puts "[!] Test cluster not running?"
cluster_running = false
end
@@ -97,9 +98,6 @@ namespace :test do
git_specs "checkout #{checkout_build_hash} --force --quiet"
end
- # Path to the API specs
- ENV['TEST_REST_API_SPEC'] = __current__.join('../tmp/elasticsearch/rest-api-spec/test/').to_s
-
# Run the task
args = [t.ruby_opts_string, t.run_code, t.file_list_string, t.option_list].join(' ')
diff --git a/elasticsearch-api/elasticsearch-api.gemspec b/elasticsearch-api/elasticsearch-api.gemspec
index 01ae615..d97b1b1 100644
--- a/elasticsearch-api/elasticsearch-api.gemspec
+++ b/elasticsearch-api/elasticsearch-api.gemspec
@@ -23,7 +23,12 @@ Gem::Specification.new do |s|
s.add_dependency "multi_json"
s.add_development_dependency "bundler", "> 1"
- s.add_development_dependency "rake"
+
+ if defined?(RUBY_VERSION) && RUBY_VERSION > '1.9'
+ s.add_development_dependency "rake", "~> 11.1"
+ else
+ s.add_development_dependency "rake", "< 11.0"
+ end
s.add_development_dependency "elasticsearch"
s.add_development_dependency "elasticsearch-transport"
diff --git a/elasticsearch-api/lib/elasticsearch/api.rb b/elasticsearch-api/lib/elasticsearch/api.rb
index 326f2d5..d9d96e2 100644
--- a/elasticsearch-api/lib/elasticsearch/api.rb
+++ b/elasticsearch-api/lib/elasticsearch/api.rb
@@ -10,6 +10,8 @@ Dir[ File.expand_path('../api/namespace/**/*.rb', __FILE__) ].each { |f| require
module Elasticsearch
module API
+ DEFAULT_SERIALIZER = MultiJson
+
COMMON_PARAMS = [
:ignore, # Client specific parameters
:index, :type, :id, # :index/:type/:id
@@ -22,7 +24,8 @@ module Elasticsearch
COMMON_QUERY_PARAMS = [
:format, # Search, Cat, ...
:pretty, # Pretty-print the response
- :human # Return numeric values in human readable format
+ :human, # Return numeric values in human readable format
+ :filter_path # Filter the JSON response
]
HTTP_GET = 'GET'.freeze
@@ -42,19 +45,22 @@ module Elasticsearch
Elasticsearch::API::Cluster,
Elasticsearch::API::Nodes,
Elasticsearch::API::Indices,
+ Elasticsearch::API::Ingest,
Elasticsearch::API::Snapshot,
+ Elasticsearch::API::Tasks,
Elasticsearch::API::Cat
end
- module ClassMethods
-
- # Access the module settings
- #
- def settings
- @settings ||= {}
- end
+ # The serializer class
+ #
+ def self.serializer
+ settings[:serializer] || DEFAULT_SERIALIZER
end
- extend ClassMethods
+ # Access the module settings
+ #
+ def self.settings
+ @settings ||= {}
+ end
end
end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/abort_benchmark.rb b/elasticsearch-api/lib/elasticsearch/api/actions/abort_benchmark.rb
index 66278d9..19b5fbf 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/abort_benchmark.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/abort_benchmark.rb
@@ -13,8 +13,6 @@ module Elasticsearch
# @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/search-benchmark.html
#
def abort_benchmark(arguments={})
- valid_params = [
- ]
method = HTTP_POST
path = "_bench/abort/#{arguments[:name]}"
params = {}
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/bulk.rb b/elasticsearch-api/lib/elasticsearch/api/actions/bulk.rb
index f05ceaf..000695a 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/bulk.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/bulk.rb
@@ -4,8 +4,9 @@ module Elasticsearch
# Perform multiple index, delete or update operations in a single request.
#
- # Pass the operations in the `:body` option as an array of hashes, following Elasticsearch conventions.
- # For operations which take data, pass them as the `:data` option in the operation hash.
+ # Supports several formats of the payload: an Array of Strings, Header/Data pairs,
+ # or the convenient "combined" format where data is passed along with the header
+ # in a single item in a custom `:data` key.
#
# @example Perform three operations in a single request, passing actions and data as an array of hashes
#
@@ -52,21 +53,33 @@ module Elasticsearch
# @option arguments [Boolean] :refresh Refresh the index after performing the operation
# @option arguments [String] :replication Explicitly set the replication type (options: sync, async)
# @option arguments [Time] :timeout Explicit operation timeout
+ # @option arguments [String] :fields Default comma-separated list of fields to return
+ # in the response for updates
+ # @option arguments [String] :pipeline The pipeline ID to use for preprocessing incoming documents
#
# @return [Hash] Deserialized Elasticsearch response
#
# @see http://elasticsearch.org/guide/reference/api/bulk/
#
def bulk(arguments={})
+ arguments = arguments.clone
+
+ type = arguments.delete(:type)
+
valid_params = [
:consistency,
:refresh,
:replication,
:type,
- :timeout ]
+ :timeout,
+ :fields,
+ :pipeline ]
+
+ unsupported_params = [ :fields, :pipeline ]
+ Utils.__report_unsupported_parameters(arguments, unsupported_params)
method = HTTP_POST
- path = Utils.__pathify Utils.__escape(arguments[:index]), Utils.__escape(arguments[:type]), '_bulk'
+ path = Utils.__pathify Utils.__escape(arguments[:index]), Utils.__escape(type), '_bulk'
params = Utils.__validate_and_extract_params arguments, valid_params
body = arguments[:body]
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/aliases.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/aliases.rb
index a25001e..6532ecd 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/aliases.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/aliases.rb
@@ -48,6 +48,9 @@ module Elasticsearch
:help,
:v ]
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
name = arguments.delete(:name)
method = HTTP_GET
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/allocation.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/allocation.rb
index 0419125..e4499dc 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/allocation.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/allocation.rb
@@ -50,6 +50,9 @@ module Elasticsearch
:help,
:v ]
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
node_id = arguments.delete(:node_id)
method = HTTP_GET
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/count.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/count.rb
index fea6766..ece0175 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/count.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/count.rb
@@ -44,6 +44,9 @@ module Elasticsearch
:help,
:v ]
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
index = arguments.delete(:index)
method = HTTP_GET
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/fielddata.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/fielddata.rb
index ea0e63b..67d1120 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/fielddata.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/fielddata.rb
@@ -34,6 +34,9 @@ module Elasticsearch
:v,
:fields ]
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
fields = arguments.delete(:fields)
method = HTTP_GET
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/health.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/health.rb
index 2af88ec..f3a7ef4 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/health.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/health.rb
@@ -37,6 +37,9 @@ module Elasticsearch
:ts,
:v ]
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
method = HTTP_GET
path = "_cat/health"
params = Utils.__validate_and_extract_params arguments, valid_params
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/indices.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/indices.rb
index 921e6b3..72245f4 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/indices.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/indices.rb
@@ -58,6 +58,9 @@ module Elasticsearch
:pri,
:v ]
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
index = arguments.delete(:index)
method = HTTP_GET
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/master.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/master.rb
index c8b0ec9..487e471 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/master.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/master.rb
@@ -35,6 +35,9 @@ module Elasticsearch
:help,
:v ]
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
method = HTTP_GET
path = "_cat/master"
params = Utils.__validate_and_extract_params arguments, valid_params
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/plugins.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/nodeattrs.rb
similarity index 70%
copy from elasticsearch-api/lib/elasticsearch/api/actions/cat/plugins.rb
copy to elasticsearch-api/lib/elasticsearch/api/actions/cat/nodeattrs.rb
index 2d4ea4f..5fafcc4 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/plugins.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/nodeattrs.rb
@@ -3,26 +3,29 @@ module Elasticsearch
module Cat
module Actions
- # Return information about installed plugins
+ # Display custom node attributes
#
- # @option arguments [Boolean] :local Return local information, do not retrieve the state from master node
- # (default: false)
+ # @option arguments [Boolean] :local Return local information, do not retrieve the state from master node (default: false)
# @option arguments [Time] :master_timeout Explicit operation timeout for connection to master node
# @option arguments [List] :h Comma-separated list of column names to display
# @option arguments [Boolean] :help Return help information
# @option arguments [Boolean] :v Verbose mode. Display column headers
#
- # @see http://www.elastic.co/guide/en/elasticsearch/reference/master/cat-plugins.html
+ # @see http://www.elastic.co/guide/en/elasticsearch/reference/master/cat-nodeattrs.html
#
- def plugins(arguments={})
+ def nodeattrs(arguments={})
valid_params = [
:local,
:master_timeout,
:h,
:help,
:v ]
- method = 'GET'
- path = "/_cat/plugins"
+
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
+ method = HTTP_GET
+ path = "_cat/nodeattrs"
params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/nodes.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/nodes.rb
index 64b5fa3..658bcb3 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/nodes.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/nodes.rb
@@ -43,11 +43,14 @@ module Elasticsearch
:help,
:v ]
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
method = HTTP_GET
path = "_cat/nodes"
params = Utils.__validate_and_extract_params arguments, valid_params
- params[:h] = Utils.__listify(params[:h]) if params[:h]
+ params[:h] = Utils.__listify(params[:h], :escape => false) if params[:h]
body = nil
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/pending_tasks.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/pending_tasks.rb
index d1f044d..db2c1f0 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/pending_tasks.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/pending_tasks.rb
@@ -35,6 +35,9 @@ module Elasticsearch
:help,
:v ]
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
method = HTTP_GET
path = "_cat/pending_tasks"
params = Utils.__validate_and_extract_params arguments, valid_params
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/plugins.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/plugins.rb
index 2d4ea4f..343516e 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/plugins.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/plugins.rb
@@ -21,8 +21,12 @@ module Elasticsearch
:h,
:help,
:v ]
- method = 'GET'
- path = "/_cat/plugins"
+
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
+ method = HTTP_GET
+ path = "_cat/plugins"
params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/recovery.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/recovery.rb
index 672141d..3ad0e59 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/recovery.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/recovery.rb
@@ -54,6 +54,9 @@ module Elasticsearch
:help,
:v ]
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
index = arguments.delete(:index)
method = HTTP_GET
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/plugins.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/repositories.rb
similarity index 54%
copy from elasticsearch-api/lib/elasticsearch/api/actions/cat/plugins.rb
copy to elasticsearch-api/lib/elasticsearch/api/actions/cat/repositories.rb
index 2d4ea4f..81a8d75 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/plugins.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/repositories.rb
@@ -3,26 +3,35 @@ module Elasticsearch
module Cat
module Actions
- # Return information about installed plugins
+ # Shows all repositories registered in a cluster
+ #
+ # @example Return list of repositories
+ #
+ # client.cat.repositories
+ #
+ # @example Return only id for each repository
+ #
+ # client.cat.repositories h: 'id'
#
- # @option arguments [Boolean] :local Return local information, do not retrieve the state from master node
- # (default: false)
# @option arguments [Time] :master_timeout Explicit operation timeout for connection to master node
# @option arguments [List] :h Comma-separated list of column names to display
# @option arguments [Boolean] :help Return help information
# @option arguments [Boolean] :v Verbose mode. Display column headers
#
- # @see http://www.elastic.co/guide/en/elasticsearch/reference/master/cat-plugins.html
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-repositories.html
#
- def plugins(arguments={})
+ def repositories(arguments={})
valid_params = [
- :local,
:master_timeout,
:h,
:help,
:v ]
- method = 'GET'
- path = "/_cat/plugins"
+
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
+ method = HTTP_GET
+ path = "_cat/repositories"
params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/segments.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/segments.rb
index 5fddca3..abe1c90 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/segments.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/segments.rb
@@ -10,6 +10,7 @@ module Elasticsearch
# puts client.cat.segments
#
# @option arguments [List] :index A comma-separated list of index names to limit the returned information
+ # @option arguments [String] :bytes The unit in which to display byte values (options: b, k, m, g)
# @option arguments [List] :h Comma-separated list of column names to display
# @option arguments [Boolean] :help Return help information
# @option arguments [Boolean] :v Verbose mode. Display column headers
@@ -18,10 +19,15 @@ module Elasticsearch
#
def segments(arguments={})
valid_params = [
+ :bytes,
:h,
:help,
:v ]
- method = 'GET'
+
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
+ method = HTTP_GET
path = "_cat/segments"
params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/shards.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/shards.rb
index e885bce..61a5c5d 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/shards.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/shards.rb
@@ -21,6 +21,10 @@ module Elasticsearch
#
# puts client.cat.shards v: true
#
+ # @example Display shard size in choice of units
+ #
+ # puts client.cat.shards bytes: 'b'
+ #
# @example Display only specific columns in the output (see the `help` parameter)
#
# puts client.cat.shards h: ['node', 'index', 'shard', 'prirep', 'docs', 'store', 'merges.total']
@@ -49,10 +53,14 @@ module Elasticsearch
valid_params = [
:local,
:master_timeout,
+ :bytes,
:h,
:help,
:v ]
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
index = arguments.delete(:index)
method = HTTP_GET
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/snapshots.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/snapshots.rb
new file mode 100644
index 0000000..f470ddd
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/snapshots.rb
@@ -0,0 +1,48 @@
+module Elasticsearch
+ module API
+ module Cat
+ module Actions
+
+ # Shows all snapshots that belong to a specific repository
+ #
+ # @example Return snapshots for 'my_repository'
+ #
+ # client.cat.snapshots repository: 'my_repository'
+ #
+ # @example Return id, status and start_epoch for 'my_repository'
+ #
+ # client.cat.snapshots repository: 'my_repository', h: 'id,status,start_epoch'
+ #
+ # @option arguments [Time] :master_timeout Explicit operation timeout for connection to master node
+ # @option arguments [List] :h Comma-separated list of column names to display
+ # @option arguments [Boolean] :help Return help information
+ # @option arguments [Boolean] :v Verbose mode. Display column headers
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-snapshots.html
+ #
+ def snapshots(arguments={})
+ raise ArgumentError, "Required argument 'repository' missing" unless arguments[:repository]
+
+ valid_params = [
+ :master_timeout,
+ :h,
+ :help,
+ :v ]
+
+ unsupported_params = [ :format ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
+ repository = arguments.delete(:repository)
+
+ method = HTTP_GET
+ path = Utils.__pathify "_cat/snapshots", Utils.__escape(repository)
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = nil
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+ end
+end
+
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/tasks.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/tasks.rb
new file mode 100644
index 0000000..c9f2ce4
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/tasks.rb
@@ -0,0 +1,45 @@
+module Elasticsearch
+ module API
+ module Cat
+ module Actions
+
+ # Return currently running tasks
+ #
+ # @option arguments [String] :format a short version of the Accept header, e.g. json, yaml
+ # @option arguments [List] :node_id A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes
+ # @option arguments [List] :actions A comma-separated list of actions that should be returned. Leave empty to return all.
+ # @option arguments [Boolean] :detailed Return detailed task information (default: false)
+ # @option arguments [String] :parent_node Return tasks with specified parent node.
+ # @option arguments [Number] :parent_task Return tasks with specified parent task id. Set to -1 to return all.
+ # @option arguments [List] :h Comma-separated list of column names to display
+ # @option arguments [Boolean] :help Return help information
+ # @option arguments [Boolean] :v Verbose mode. Display column headers
+ #
+ # @see http://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html
+ #
+ def tasks(arguments={})
+ Utils.__report_unsupported_method(__method__)
+
+ valid_params = [
+ :format,
+ :node_id,
+ :actions,
+ :detailed,
+ :parent_node,
+ :parent_task,
+ :h,
+ :help,
+ :v ]
+
+ method = HTTP_GET
+
+ path = "_cat/tasks"
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = nil
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cat/thread_pool.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cat/thread_pool.rb
index 6a8e7a8..5c5c5b0 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cat/thread_pool.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cat/thread_pool.rb
@@ -23,6 +23,8 @@ module Elasticsearch
# puts client.cat.thread_pool h: 'host,ia,is,iq,ir,sa,ss,sq,sr', v: true
#
# @option arguments [Boolean] :full_id Display the complete node ID
+ # @option arguments [String] :size The multiplier in which to display values
+ # (Options: k, m, g, t, p)
# @option arguments [List] :h Comma-separated list of column names to display -- see the `help` argument
# @option arguments [Boolean] :v Display column headers as part of the output
# @option arguments [String] :format The output format. Options: 'text', 'json'; default: 'text'
@@ -36,12 +38,16 @@ module Elasticsearch
def thread_pool(arguments={})
valid_params = [
:full_id,
+ :size,
:local,
:master_timeout,
:h,
:help,
:v ]
+ unsupported_params = [ :format, :size ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
method = HTTP_GET
path = "_cat/thread_pool"
params = Utils.__validate_and_extract_params arguments, valid_params
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cluster/allocation_explain.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cluster/allocation_explain.rb
new file mode 100644
index 0000000..0d4ba7f
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cluster/allocation_explain.rb
@@ -0,0 +1,28 @@
+module Elasticsearch
+ module API
+ module Cluster
+ module Actions
+
+ # Return the information about why a shard is or isn't allocated
+ #
+ # @option arguments [Hash] :body The index, shard, and primary flag to explain. Empty means 'explain the first unassigned shard'
+ # @option arguments [Boolean] :include_yes_decisions Return 'YES' decisions in explanation (default: false)
+ #
+ # @see http://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-allocation-explain.html
+ #
+ def allocation_explain(arguments={})
+ Utils.__report_unsupported_method(__method__)
+
+ valid_params = [ :include_yes_decisions ]
+
+ method = HTTP_GET
+ path = "_cluster/allocation/explain"
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = arguments[:body]
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cluster/get_settings.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cluster/get_settings.rb
index eaa9b88..3aa59da 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cluster/get_settings.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cluster/get_settings.rb
@@ -10,14 +10,20 @@ module Elasticsearch
# client.cluster.get_settings
#
# @option arguments [Boolean] :flat_settings Return settings in flat format (default: false)
+ # @option arguments [Boolean] :include_defaults Whether to return all default cluster settings
+ # (default: false)
#
# @see http://elasticsearch.org/guide/reference/api/admin-cluster-update-settings/
#
def get_settings(arguments={})
valid_params = [
- :flat_settings
+ :flat_settings,
+ :include_defaults
]
+ unsupported_params = [ :include_defaults ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
method = HTTP_GET
path = "_cluster/settings"
params = Utils.__validate_and_extract_params arguments, valid_params
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cluster/health.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cluster/health.rb
index 4f14a45..b681f33 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cluster/health.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cluster/health.rb
@@ -30,6 +30,9 @@ module Elasticsearch
# @see http://elasticsearch.org/guide/reference/api/admin-cluster-health/
#
def health(arguments={})
+ arguments = arguments.clone
+ index = arguments.delete(:index)
+
valid_params = [
:level,
:local,
@@ -41,7 +44,7 @@ module Elasticsearch
:wait_for_status ]
method = HTTP_GET
- path = "_cluster/health"
+ path = Utils.__pathify "_cluster/health", Utils.__listify(index)
params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cluster/pending_tasks.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cluster/pending_tasks.rb
index ec75a9e..e48787d 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cluster/pending_tasks.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cluster/pending_tasks.rb
@@ -21,7 +21,7 @@ module Elasticsearch
:local,
:master_timeout ]
method = HTTP_GET
- path = "/_cluster/pending_tasks"
+ path = "_cluster/pending_tasks"
params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cluster/reroute.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cluster/reroute.rb
index 3ea9a6a..77a9893 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/cluster/reroute.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cluster/reroute.rb
@@ -26,11 +26,16 @@ module Elasticsearch
# Defaults to all but metadata. (Options: _all, blocks, metadata,
# nodes, routing_table, master_node, version)
# @option arguments [Time] :master_timeout Specify timeout for connection to master
+ # @option arguments [Boolean] :retry_failed Retries allocation of shards that are blocked due to too many
+ # subsequent allocation failures
#
# @see http://elasticsearch.org/guide/reference/api/admin-cluster-reroute/
#
def reroute(arguments={})
- valid_params = [ :dry_run, :explain, :metric, :master_timeout, :timeout ]
+ valid_params = [ :dry_run, :explain, :metric, :master_timeout, :retry_failed, :timeout ]
+
+ unsupported_params = [ :retry_failed ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
method = HTTP_POST
path = "_cluster/reroute"
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/cluster/stats.rb b/elasticsearch-api/lib/elasticsearch/api/actions/cluster/stats.rb
new file mode 100644
index 0000000..df935b7
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/cluster/stats.rb
@@ -0,0 +1,30 @@
+module Elasticsearch
+ module API
+ module Cluster
+ module Actions
+
+ # Returns statistical information about the cluster
+ #
+ # @option arguments [List] :node_id A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes
+ # @option arguments [Boolean] :flat_settings Return settings in flat format (default: false)
+ # @option arguments [Boolean] :human Whether to return time and byte values in human-readable format.
+ # @option arguments [Time] :timeout Explicit operation timeout
+ #
+ # @see http://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-stats.html
+ #
+ def stats(arguments={})
+ valid_params = [
+ :flat_settings,
+ :human,
+ :timeout ]
+ method = 'GET'
+ path = "_cluster/stats"
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = nil
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/delete_by_query.rb b/elasticsearch-api/lib/elasticsearch/api/actions/delete_by_query.rb
index 8483b10..cc61a30 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/delete_by_query.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/delete_by_query.rb
@@ -43,6 +43,8 @@ module Elasticsearch
# @see http://www.elasticsearch.org/guide/reference/api/delete-by-query/
#
def delete_by_query(arguments={})
+ Utils.__report_unsupported_method(__method__)
+
raise ArgumentError, "Required argument 'index' missing" unless arguments[:index]
valid_params = [
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/field_stats.rb b/elasticsearch-api/lib/elasticsearch/api/actions/field_stats.rb
index 8384f3c..e87426c 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/field_stats.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/field_stats.rb
@@ -21,9 +21,9 @@ module Elasticsearch
:allow_no_indices,
:expand_wildcards ]
method = 'GET'
- path = "_field_stats"
+ path = Utils.__pathify Utils.__escape(arguments[:index]), "_field_stats"
params = Utils.__validate_and_extract_params arguments, valid_params
- body = nil
+ body = arguments[:body]
perform_request(method, path, params, body).body
end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/index.rb b/elasticsearch-api/lib/elasticsearch/api/actions/index.rb
index 8ac2145..d15bc9d 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/index.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/index.rb
@@ -79,6 +79,7 @@ module Elasticsearch
:op_type,
:parent,
:percolate,
+ :pipeline,
:refresh,
:replication,
:routing,
@@ -88,6 +89,9 @@ module Elasticsearch
:version,
:version_type ]
+ unsupported_params = [ :pipeline ]
+ Utils.__report_unsupported_parameters(arguments, unsupported_params)
+
method = arguments[:id] ? HTTP_PUT : HTTP_POST
path = Utils.__pathify Utils.__escape(arguments[:index]),
Utils.__escape(arguments[:type]),
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/analyze.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/analyze.rb
index d8aa2ad..6db0203 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/analyze.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/analyze.rb
@@ -36,6 +36,8 @@ module Elasticsearch
# @option arguments [List] :filters A comma-separated list of token filters to use for the analysis.
# (Also available as the `:token_filters` option)
# @option arguments [List] :char_filters A comma-separated list of char filters to use for the analysis
+ # @option arguments [Boolean] :explain Whether to output further details (default: false)
+ # @option arguments [List] :attributes A comma-separated list of token attributes to output (use with `:explain`)
# @option arguments [String] :index The name of the index to scope the operation
# @option arguments [Boolean] :prefer_local With `true`, specify that a local shard should be used if available,
# with `false`, use a random shard (default: true)
@@ -50,8 +52,11 @@ module Elasticsearch
valid_params = [
:analyzer,
:char_filters,
+ :explain,
+ :attributes,
:field,
:filters,
+ :filter,
:index,
:prefer_local,
:text,
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/clear_cache.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/clear_cache.rb
index 0dc5b0a..0401527 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/clear_cache.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/clear_cache.rb
@@ -34,7 +34,7 @@ module Elasticsearch
# `field_data` parameter(default: all)
# @option arguments [Boolean] :filter Clear filter caches
# @option arguments [Boolean] :filter_cache Clear filter caches
- # @option arguments [Boolean] :filter_keys A comma-separated list of keys to clear when using the
+ # @option arguments [List] :filter_keys A comma-separated list of keys to clear when using the
# `filter_cache` parameter (default: all)
# @option arguments [Boolean] :id Clear ID caches for parent/child
# @option arguments [Boolean] :id_cache Clear ID caches for parent/child
@@ -45,7 +45,7 @@ module Elasticsearch
# @option arguments [List] :index A comma-separated list of index name to limit the operation
# @option arguments [Boolean] :recycler Clear the recycler cache
#
- # @see http://www.elasticsearch.org/guide/reference/api/admin-indices-clearcache/
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-clearcache.html
#
def clear_cache(arguments={})
valid_params = [
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/close.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/close.rb
index 5f43eb6..4213680 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/close.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/close.rb
@@ -11,7 +11,8 @@ module Elasticsearch
#
# client.indices.close index: 'myindex'
#
- # @option arguments [String] :index The name of the index (*Required*)
+ # @option arguments [List] :index A comma separated list of indices to perform the operation on
+ # (*Required*)
# @option arguments [Boolean] :allow_no_indices Whether to ignore if a wildcard indices expression resolves into
# no concrete indices. (This includes `_all` string or when no
# indices have been specified)
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/create.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/create.rb
index da61849..fc75cd3 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/create.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/create.rb
@@ -60,13 +60,20 @@ module Elasticsearch
#
# @option arguments [String] :index The name of the index (*Required*)
# @option arguments [Hash] :body Optional configuration for the index (`settings` and `mappings`)
+ # @option arguments [Boolean] :update_all_types Whether to update the mapping for all fields
+ # with the same name across all types
# @option arguments [Time] :timeout Explicit operation timeout
+ # @option arguments [Boolean] :master_timeout Timeout for connection to master
#
# @see http://www.elasticsearch.org/guide/reference/api/admin-indices-create-index/
#
def create(arguments={})
raise ArgumentError, "Required argument 'index' missing" unless arguments[:index]
- valid_params = [ :timeout ]
+ valid_params = [
+ :timeout,
+ :master_timeout,
+ :update_all_types
+ ]
method = HTTP_PUT
path = Utils.__pathify Utils.__escape(arguments[:index])
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/exists_type.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/exists_type.rb
index 5dd7aa4..3c0692e 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/exists_type.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/exists_type.rb
@@ -5,7 +5,7 @@ module Elasticsearch
# Return true if the specified type exists, false otherwise.
#
- # client.indices.exists_type? type: 'mytype'
+ # client.indices.exists_type? index: 'myindex', type: 'mytype'
#
# @option arguments [List] :index A comma-separated list of index names; use `_all`
# to check the types across all indices (*Required*)
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/flush_synced.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/flush_synced.rb
new file mode 100644
index 0000000..98d4da4
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/flush_synced.rb
@@ -0,0 +1,35 @@
+module Elasticsearch
+ module API
+ module Indices
+ module Actions
+
+ # @option arguments [List] :index A comma-separated list of index names; use `_all` or empty string for all indices
+ # @option arguments [Boolean] :ignore_unavailable Whether specified concrete indices should be ignored when unavailable (missing or closed)
+ # @option arguments [Boolean] :allow_no_indices Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)
+ # @option arguments [String] :expand_wildcards Whether to expand wildcard expression to concrete indices that are open, closed or both. (options: open, closed, none, all)
+ #
+ # @see http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-flush.html
+ #
+ def flush_synced(arguments={})
+ valid_params = [
+ :ignore_unavailable,
+ :allow_no_indices,
+ :expand_wildcards
+ ]
+
+ method = HTTP_POST
+ path = Utils.__pathify Utils.__listify(arguments[:index]), '_flush/synced'
+
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = nil
+
+ if Array(arguments[:ignore]).include?(404)
+ Utils.__rescue_from_not_found { perform_request(method, path, params, body).body }
+ else
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/forcemerge.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/forcemerge.rb
new file mode 100644
index 0000000..ae99ce8
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/forcemerge.rb
@@ -0,0 +1,62 @@
+module Elasticsearch
+ module API
+ module Indices
+ module Actions
+
+ # Force merge an index, list of indices, or all indices in the cluster.
+ #
+ # @example Fully force merge an index
+ #
+ # client.indices.forcemerge index: 'foo', max_num_segments: 1
+ #
+ # @example Do not flush index after force-merging
+ #
+ # client.indices.forcemerge index: 'foo', flush: false
+ #
+ # @example Do not expunge deleted documents after force-merging
+ #
+ # client.indices.forcemerge index: 'foo', only_expunge_deletes: false
+ #
+ # @example Force merge a list of indices
+ #
+ # client.indices.forcemerge index: ['foo', 'bar']
+ # client.indices.forcemerge index: 'foo,bar'
+ #
+ # @example forcemerge a list of indices matching wildcard expression
+ #
+ # client.indices.forcemerge index: 'foo*'
+ #
+ # @example forcemerge all indices
+ #
+ # client.indices.forcemerge index: '_all'
+ #
+ # @option arguments [List] :index A comma-separated list of indices to forcemerge;
+ # use `_all` to forcemerge all indices
+ # @option arguments [Number] :max_num_segments The number of segments the index should be merged into
+ # (default: dynamic)
+ # @option arguments [Boolean] :only_expunge_deletes Specify whether the operation should only expunge
+ # deleted documents
+ # @option arguments [Boolean] :flush Specify whether the index should be flushed after performing the operation
+ # (default: true)
+ #
+ # @see http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-forcemerge.html
+ #
+ def forcemerge(arguments={})
+ valid_params = [
+ :max_num_segments,
+ :only_expunge_deletes,
+ :flush
+ ]
+
+ method = HTTP_POST
+ path = Utils.__pathify Utils.__listify(arguments[:index]), '_forcemerge'
+
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = nil
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/get.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/get.rb
index 4e26b61..7803b1e 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/get.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/get.rb
@@ -7,6 +7,7 @@ module Elasticsearch
#
# @option arguments [List] :index A comma-separated list of index names (*Required*)
# @option arguments [List] :feature A comma-separated list of features
+ #                                               (options: _settings, _mappings, _aliases)
# @option arguments [Boolean] :local Return local information, do not retrieve the state from master node
# (default: false)
# @option arguments [Boolean] :ignore_unavailable Ignore unavailable indexes (default: false)
@@ -14,6 +15,11 @@ module Elasticsearch
# indices (default: false)
# @option arguments [List] :expand_wildcards Whether wildcard expressions should get expanded
# to open or closed indices (default: open)
+ # @option arguments [Boolean] :flat_settings Return settings in flat format (default: false)
+ # @option arguments [Boolean] :human Whether to return version and creation date values in
+ # human-readable format (default: false)
+ # @option arguments [Boolean] :include_defaults Whether to return all default setting
+ #                                                   for each of the indices (default: false)
#
# @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/indices-get-index.html
#
@@ -24,7 +30,13 @@ module Elasticsearch
:local,
:ignore_unavailable,
:allow_no_indices,
- :expand_wildcards ]
+ :expand_wildcards,
+ :flat_settings,
+ :human,
+ :include_defaults ]
+
+ unsupported_params = [ :include_defaults ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
method = HTTP_GET
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/get_field_mapping.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/get_field_mapping.rb
index 186f452..7f254fb 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/get_field_mapping.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/get_field_mapping.rb
@@ -34,7 +34,11 @@ module Elasticsearch
# @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-get-field-mapping.html
#
def get_field_mapping(arguments={})
- raise ArgumentError, "Required argument 'field' missing" unless arguments[:field]
+ arguments = arguments.clone
+
+ fields = arguments.delete(:field) || arguments.delete(:fields)
+ raise ArgumentError, "Required argument 'field' missing" unless fields
+
valid_params = [
:include_defaults,
:ignore_indices,
@@ -49,7 +53,7 @@ module Elasticsearch
'_mapping',
Utils.__listify(arguments[:type]),
'field',
- Utils.__listify(arguments[:field])
+ Utils.__listify(fields)
)
params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/get_settings.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/get_settings.rb
index f4b63b2..99ac5ed 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/get_settings.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/get_settings.rb
@@ -34,6 +34,7 @@ module Elasticsearch
# `missing` ones (options: none, missing) @until 1.0
# @option arguments [Boolean] :ignore_unavailable Whether specified concrete indices should be ignored when
# unavailable (missing, closed, etc)
+ # @option arguments [Boolean] :include_defaults Whether to return all default cluster settings
# @option arguments [Boolean] :flat_settings Return settings in flat format (default: false)
# @option arguments [Boolean] :local Return local information, do not retrieve the state from master node
# (default: false)
@@ -45,12 +46,16 @@ module Elasticsearch
:prefix,
:ignore_indices,
:ignore_unavailable,
+ :include_defaults,
:allow_no_indices,
:expand_wildcards,
:flat_settings,
:local
]
+ unsupported_params = [ :include_defaults ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
method = HTTP_GET
path = Utils.__pathify Utils.__listify(arguments[:index]),
Utils.__listify(arguments[:type]),
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/get_warmer.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/get_warmer.rb
index 7cf719a..82b4aa0 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/get_warmer.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/get_warmer.rb
@@ -43,12 +43,13 @@ module Elasticsearch
:ignore_indices,
:ignore_unavailable,
:allow_no_indices,
- :expand_wildcards
+ :expand_wildcards,
+ :local
]
method = HTTP_GET
path = Utils.__pathify( Utils.__listify(arguments[:index]), '_warmer', Utils.__escape(arguments[:name]) )
- params = {}
+ params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
perform_request(method, path, params, body).body
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/open.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/open.rb
index 1b0f242..3cf4a76 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/open.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/open.rb
@@ -9,7 +9,8 @@ module Elasticsearch
#
# client.indices.open index: 'myindex'
#
- # @option arguments [String] :index The name of the index (*Required*)
+ # @option arguments [List] :index A comma separated list of indices to perform the operation on
+ # (*Required*)
# @option arguments [Boolean] :allow_no_indices Whether to ignore if a wildcard indices expression resolves into
# no concrete indices. (This includes `_all` string or when no
# indices have been specified)
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/optimize.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/optimize.rb
index 4e205cf..13ff020 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/optimize.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/optimize.rb
@@ -8,6 +8,8 @@ module Elasticsearch
# The "optimize" operation merges the index segments, increasing search performance.
# It corresponds to a Lucene "merge" operation.
#
+ # @deprecated The "optimize" action has been deprecated in favor of forcemerge [https://github.com/elastic/elasticsearch/pull/13778]
+ #
# @example Fully optimize an index (merge to one segment)
#
# client.indices.optimize index: 'foo', max_num_segments: 1, wait_for_merge: false
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/put_mapping.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/put_mapping.rb
index b7f7046..df598d2 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/put_mapping.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/put_mapping.rb
@@ -40,8 +40,10 @@ module Elasticsearch
# `missing` ones (options: none, missing) @until 1.0
# @option arguments [Boolean] :ignore_unavailable Whether specified concrete indices should be ignored when
# unavailable (missing, closed, etc)
- # @option arguments [Time] :master_timeout Specify timeout for connection to master
+ # @option arguments [Boolean] :update_all_types Whether to update the mapping for all fields
+ # with the same name across all types
# @option arguments [Time] :timeout Explicit operation timeout
+ # @option arguments [Boolean] :master_timeout Timeout for connection to master
#
# @see http://www.elasticsearch.org/guide/reference/api/admin-indices-put-mapping/
#
@@ -55,6 +57,7 @@ module Elasticsearch
:ignore_unavailable,
:allow_no_indices,
:expand_wildcards,
+ :update_all_types,
:master_timeout,
:timeout
]
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/put_settings.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/put_settings.rb
index 04e951c..5440789 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/put_settings.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/put_settings.rb
@@ -36,6 +36,10 @@ module Elasticsearch
# `missing` ones (options: none, missing) @until 1.0
# @option arguments [Boolean] :ignore_unavailable Whether specified concrete indices should be ignored when
# unavailable (missing, closed, etc)
+ # @option arguments [Boolean] :include_defaults Whether to return all default cluster settings
+ # @option arguments [Boolean] :preserve_existing Whether to update existing settings.
+ # If set to `true` existing settings on an index remain
+ # unchanged, the default is `false`
# @option arguments [Time] :master_timeout Specify timeout for connection to master
# @option arguments [Boolean] :flat_settings Return settings in flat format (default: false)
#
@@ -47,12 +51,17 @@ module Elasticsearch
valid_params = [
:ignore_indices,
:ignore_unavailable,
+ :include_defaults,
:allow_no_indices,
:expand_wildcards,
+ :preserve_existing,
:master_timeout,
:flat_settings
]
+ unsupported_params = [ :preserve_existing ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
method = HTTP_PUT
path = Utils.__pathify Utils.__listify(arguments[:index]), '_settings'
params = Utils.__validate_and_extract_params arguments, valid_params
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/seal.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/seal.rb
index 228dbe1..4579c1c 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/seal.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/seal.rb
@@ -11,8 +11,6 @@ module Elasticsearch
# @see http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-seal.html
#
def seal(arguments={})
- valid_params = [
- ]
method = 'POST'
path = Utils.__pathify Utils.__listify(arguments[:index]), '_seal'
params = {}
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/segments.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/segments.rb
index 9428a8e..d5399e4 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/segments.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/segments.rb
@@ -10,15 +10,16 @@ module Elasticsearch
#
# @option arguments [List] :index A comma-separated list of index names; use `_all` or empty string
# to perform the operation on all indices
- # @option arguments [Boolean] :allow_no_indices Whether to ignore if a wildcard indices expression resolves into
- # no concrete indices. (This includes `_all` string or when no
- # indices have been specified)
- # @option arguments [String] :expand_wildcards Whether to expand wildcard expression to concrete indices that
- # are open, closed or both. (options: open, closed)
+ # @option arguments [Boolean] :allow_no_indices Whether to ignore if a wildcard indices expression
+ # resolves into no concrete indices. (This includes `_all`
+ # string or when no indices have been specified)
+ # @option arguments [String] :expand_wildcards Whether to expand wildcard expression to concrete indices
+ # that are open, closed or both. (options: open, closed)
# @option arguments [String] :ignore_indices When performed on multiple indices, allows to ignore
# `missing` ones (options: none, missing) @until 1.0
# @option arguments [Boolean] :ignore_unavailable Whether specified concrete indices should be ignored when
# unavailable (missing, closed, etc)
+ # @option arguments [Boolean] :verbose Whether to include detailed memory usage by Lucene (default: false)
#
# @see http://elasticsearch.org/guide/reference/api/admin-indices-segments/
#
@@ -27,7 +28,8 @@ module Elasticsearch
:ignore_indices,
:ignore_unavailable,
:allow_no_indices,
- :expand_wildcards
+ :expand_wildcards,
+ :verbose
]
method = HTTP_GET
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/shard_stores.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/shard_stores.rb
new file mode 100644
index 0000000..1aa5c17
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/shard_stores.rb
@@ -0,0 +1,34 @@
+module Elasticsearch
+ module API
+ module Indices
+ module Actions
+
+ # Provides low-level information about shards (allocated nodes, exceptions, ...)
+ #
+ # @option arguments [List] :index A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices
+ # @option arguments [List] :status A comma-separated list of statuses used to filter on shards to get store information for (options: green, yellow, red, all)
+ # @option arguments [Boolean] :ignore_unavailable Whether specified concrete indices should be ignored when unavailable (missing or closed)
+ # @option arguments [Boolean] :allow_no_indices Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)
+ # @option arguments [String] :expand_wildcards Whether to expand wildcard expression to concrete indices that are open, closed or both. (options: open, closed, none, all)
+ # @option arguments [String] :operation_threading
+ #
+ # @see http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-shards-stores.html
+ #
+ def shard_stores(arguments={})
+ valid_params = [
+ :status,
+ :ignore_unavailable,
+ :allow_no_indices,
+ :expand_wildcards,
+ :operation_threading ]
+ method = 'GET'
+ path = Utils.__pathify Utils.__escape(arguments[:index]), "_shard_stores"
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = nil
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/indices/status.rb b/elasticsearch-api/lib/elasticsearch/api/actions/indices/status.rb
index 1fe741e..3087071 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/indices/status.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/indices/status.rb
@@ -48,7 +48,11 @@ module Elasticsearch
params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
- perform_request(method, path, params, body).body
+ if Array(arguments[:ignore]).include?(404)
+ Utils.__rescue_from_not_found { perform_request(method, path, params, body).body }
+ else
+ perform_request(method, path, params, body).body
+ end
end
end
end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/ingest/delete_pipeline.rb b/elasticsearch-api/lib/elasticsearch/api/actions/ingest/delete_pipeline.rb
new file mode 100644
index 0000000..71b7349
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/ingest/delete_pipeline.rb
@@ -0,0 +1,33 @@
+module Elasticsearch
+ module API
+ module Ingest
+ module Actions
+
+ # Delete a specified pipeline
+ #
+ # @option arguments [String] :id Pipeline ID (*Required*)
+ # @option arguments [Time] :master_timeout Explicit operation timeout for connection to master node
+ # @option arguments [Time] :timeout Explicit operation timeout
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/plugins/master/ingest.html
+ #
+ def delete_pipeline(arguments={})
+ Utils.__report_unsupported_method(__method__)
+
+ raise ArgumentError, "Required argument 'id' missing" unless arguments[:id]
+
+ valid_params = [
+ :master_timeout,
+ :timeout ]
+
+ method = HTTP_DELETE
+ path = Utils.__pathify "_ingest/pipeline", Utils.__escape(arguments[:id])
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = nil
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/ingest/get_pipeline.rb b/elasticsearch-api/lib/elasticsearch/api/actions/ingest/get_pipeline.rb
new file mode 100644
index 0000000..8b523fb
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/ingest/get_pipeline.rb
@@ -0,0 +1,31 @@
+module Elasticsearch
+ module API
+ module Ingest
+ module Actions
+
+ # Return a specified pipeline
+ #
+ # @option arguments [String] :id Comma separated list of pipeline ids. Wildcards supported (*Required*)
+ # @option arguments [Time] :master_timeout Explicit operation timeout for connection to master node
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/plugins/master/ingest.html
+ #
+ def get_pipeline(arguments={})
+ Utils.__report_unsupported_method(__method__)
+
+ raise ArgumentError, "Required argument 'id' missing" unless arguments[:id]
+
+ valid_params = [
+ :master_timeout ]
+
+ method = HTTP_GET
+ path = Utils.__pathify "_ingest/pipeline", Utils.__escape(arguments[:id])
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = nil
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/ingest/put_pipeline.rb b/elasticsearch-api/lib/elasticsearch/api/actions/ingest/put_pipeline.rb
new file mode 100644
index 0000000..1b846d1
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/ingest/put_pipeline.rb
@@ -0,0 +1,36 @@
+module Elasticsearch
+ module API
+ module Ingest
+ module Actions
+
+ # Add or update a specified pipeline
+ #
+ # @option arguments [String] :id Pipeline ID (*Required*)
+ # @option arguments [Hash] :body The ingest definition (*Required*)
+ # @option arguments [Time] :master_timeout Explicit operation timeout for connection to master node
+ # @option arguments [Time] :timeout Explicit operation timeout
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/plugins/master/ingest.html
+ #
+ def put_pipeline(arguments={})
+ Utils.__report_unsupported_method(__method__)
+
+ raise ArgumentError, "Required argument 'id' missing" unless arguments[:id]
+ raise ArgumentError, "Required argument 'body' missing" unless arguments[:body]
+
+ valid_params = [
+ :master_timeout,
+ :timeout ]
+
+ method = HTTP_PUT
+ path = Utils.__pathify "_ingest/pipeline", Utils.__escape(arguments[:id])
+
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = arguments[:body]
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/ingest/simulate.rb b/elasticsearch-api/lib/elasticsearch/api/actions/ingest/simulate.rb
new file mode 100644
index 0000000..dd0f4d9
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/ingest/simulate.rb
@@ -0,0 +1,32 @@
+module Elasticsearch
+ module API
+ module Ingest
+ module Actions
+
+ # Execute a specific pipeline against the set of documents provided in the body of the request
+ #
+ # @option arguments [String] :id Pipeline ID
+ # @option arguments [Hash] :body The pipeline definition (*Required*)
+ # @option arguments [Boolean] :verbose Verbose mode. Display data output for each processor
+ # in executed pipeline
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/master/simulate-pipeline-api.html
+ #
+ def simulate(arguments={})
+ Utils.__report_unsupported_method(__method__)
+
+ raise ArgumentError, "Required argument 'body' missing" unless arguments[:body]
+
+ valid_params = [ :verbose ]
+
+ method = HTTP_GET
+ path = Utils.__pathify "_ingest/pipeline", Utils.__escape(arguments[:id]), '_simulate'
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = arguments[:body]
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/list_benchmarks.rb b/elasticsearch-api/lib/elasticsearch/api/actions/list_benchmarks.rb
index 08f4d71..d924aba 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/list_benchmarks.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/list_benchmarks.rb
@@ -15,8 +15,6 @@ module Elasticsearch
# @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/search-benchmark.html
#
def list_benchmarks(arguments={})
- valid_params = [
- ]
method = HTTP_GET
path = "_bench"
params = {}
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/mpercolate.rb b/elasticsearch-api/lib/elasticsearch/api/actions/mpercolate.rb
index b9a4e04..072295f 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/mpercolate.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/mpercolate.rb
@@ -34,7 +34,8 @@ module Elasticsearch
valid_params = [
:ignore_unavailable,
:allow_no_indices,
- :expand_wildcards ]
+ :expand_wildcards,
+ :percolate_format ]
method = HTTP_GET
path = "_mpercolate"
@@ -44,7 +45,7 @@ module Elasticsearch
case
when body.is_a?(Array)
- payload = body.map { |d| d.is_a?(String) ? d : MultiJson.dump(d) }
+ payload = body.map { |d| d.is_a?(String) ? d : Elasticsearch::API.serializer.dump(d) }
payload << "" unless payload.empty?
payload = payload.join("\n")
else
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/msearch.rb b/elasticsearch-api/lib/elasticsearch/api/actions/msearch.rb
index db4e52b..1295fbf 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/msearch.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/msearch.rb
@@ -12,7 +12,7 @@ module Elasticsearch
# body: [
# { search: { query: { match_all: {} } } },
# { index: 'myindex', type: 'mytype', search: { query: { query_string: { query: '"Test 1"' } } } },
- # { search_type: 'count', search: { facets: { published: { terms: { field: 'published' } } } } }
+ # { search_type: 'count', search: { aggregations: { published: { terms: { field: 'published' } } } } }
# ]
#
# @example Perform multiple different searches as an array of meta/data pairs
@@ -23,7 +23,7 @@ module Elasticsearch
# { index: 'myindex', type: 'mytype' },
# { query: { query_string: { query: '"Test 1"' } } },
# { search_type: 'count' },
- # { facets: { published: { terms: { field: 'published' } } } }
+ # { aggregations: { published: { terms: { field: 'published' } } } }
# ]
#
# @option arguments [List] :index A comma-separated list of index names to use as default
@@ -56,11 +56,11 @@ module Elasticsearch
sum << data
sum
end.
- map { |item| MultiJson.dump(item) }
+ map { |item| Elasticsearch::API.serializer.dump(item) }
payload << "" unless payload.empty?
payload = payload.join("\n")
when body.is_a?(Array)
- payload = body.map { |d| d.is_a?(String) ? d : MultiJson.dump(d) }
+ payload = body.map { |d| d.is_a?(String) ? d : Elasticsearch::API.serializer.dump(d) }
payload << "" unless payload.empty?
payload = payload.join("\n")
else
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/nodes/hot_threads.rb b/elasticsearch-api/lib/elasticsearch/api/actions/nodes/hot_threads.rb
index f8f085a..b481e73 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/nodes/hot_threads.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/nodes/hot_threads.rb
@@ -20,6 +20,7 @@ module Elasticsearch
# @option arguments [Number] :snapshots Number of samples of thread stacktrace (default: 10)
# @option arguments [Number] :threads Specify the number of threads to provide information for (default: 3)
# @option arguments [String] :type The type to sample (default: cpu) (options: cpu, wait, block)
+ # @option arguments [Time] :timeout Explicit operation timeout
#
# @return [String]
#
@@ -30,7 +31,8 @@ module Elasticsearch
:interval,
:snapshots,
:threads,
- :type ]
+ :type,
+ :timeout ]
method = HTTP_GET
path = Utils.__pathify '_nodes', Utils.__listify(arguments[:node_id]), 'hot_threads'
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/nodes/info.rb b/elasticsearch-api/lib/elasticsearch/api/actions/nodes/info.rb
index 5b52545..d41766c 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/nodes/info.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/nodes/info.rb
@@ -35,6 +35,7 @@ module Elasticsearch
# @option arguments [Boolean] :settings Return information about node settings
# @option arguments [Boolean] :thread_pool Return information about the thread pool
# @option arguments [Boolean] :transport Return information about transport
+ # @option arguments [Time] :timeout Explicit operation timeout
#
# @see http://elasticsearch.org/guide/reference/api/admin-cluster-nodes-info/
#
@@ -52,9 +53,10 @@ module Elasticsearch
:process,
:settings,
:thread_pool,
- :transport ]
+ :transport,
+ :timeout ]
- valid_params = []
+ valid_params = [ :timeout ]
method = HTTP_GET
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/nodes/stats.rb b/elasticsearch-api/lib/elasticsearch/api/actions/nodes/stats.rb
index 8f807a3..9739b3b 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/nodes/stats.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/nodes/stats.rb
@@ -37,6 +37,7 @@ module Elasticsearch
# @option arguments [String] :level Specify the level for aggregating indices stats
# (options: node, indices, shards)
# @option arguments [List] :types A comma-separated list of document types for the `indexing` index metric
+ # @option arguments [Time] :timeout Explicit operation timeout
#
# @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/cluster-nodes-stats.html
#
@@ -53,7 +54,8 @@ module Elasticsearch
:groups,
:human,
:level,
- :types ]
+ :types,
+ :timeout ]
method = HTTP_GET
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/percolate.rb b/elasticsearch-api/lib/elasticsearch/api/actions/percolate.rb
index facb779..e32f432 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/percolate.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/percolate.rb
@@ -93,7 +93,7 @@ module Elasticsearch
method = HTTP_GET
path = Utils.__pathify Utils.__escape(arguments[:index]),
Utils.__escape(arguments[:type]),
- arguments[:id],
+ Utils.__escape(arguments[:id]),
'_percolate'
params = Utils.__validate_and_extract_params arguments, valid_params
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/ping.rb b/elasticsearch-api/lib/elasticsearch/api/actions/ping.rb
index d546cc4..2527352 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/ping.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/ping.rb
@@ -16,8 +16,14 @@ module Elasticsearch
params = {}
body = nil
- Utils.__rescue_from_not_found do
+ begin
perform_request(method, path, params, body).status == 200 ? true : false
+ rescue Exception => e
+ if e.class.to_s =~ /NotFound|ConnectionFailed/ || e.message =~ /Not\s*Found|404|ConnectionFailed/i
+ false
+ else
+ raise e
+ end
end
end
end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/reindex.rb b/elasticsearch-api/lib/elasticsearch/api/actions/reindex.rb
new file mode 100644
index 0000000..b00abeb
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/reindex.rb
@@ -0,0 +1,69 @@
+module Elasticsearch
+ module API
+ module Actions
+
+ # Copy documents from one index to another, potentially changing
+ # their settings, mappings and the documents themselves.
+ #
+ # @example Copy documents into a different index
+ #
+ # client.reindex body: { source: { index: 'test1' }, dest: { index: 'test2' } }
+ #
+ # @example Limit the copied documents with a query
+ #
+ # client.reindex body: {
+ # source: {
+ # index: 'test1',
+ # query: { terms: { category: ['one', 'two'] } }
+ # },
+ # dest: {
+ # index: 'test2'
+ # }
+ # }
+ #
+ # @example Remove a field from reindexed documents
+ #
+ # client.reindex body: {
+ # source: {
+ # index: 'test1'
+ # },
+ # dest: {
+ # index: 'test3'
+ # },
+ # script: {
+ # inline: 'ctx._source.remove("category")'
+ # }
+ # }
+ #
+ # @option arguments [Hash] :body The definition of the operation (source index, target index, ...)
+ # (*Required*)
+ # @option arguments [Boolean] :refresh Whether the affected indexes should be refreshed
+ # @option arguments [Time] :timeout Time each individual bulk request should wait for shards
+ # that are unavailable. (Default: 1m)
+ # @option arguments [String] :consistency Explicit write consistency setting for the operation
+ # (Options: one, quorum, all)
+ # @option arguments [Boolean] :wait_for_completion Whether the request should block and wait until
+ # the operation has completed
+ # @option arguments [Float] :requests_per_second The throttling for this request in sub-requests per second.
+ # 0 means set no throttling (default)
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html
+ #
+ def reindex(arguments={})
+ raise ArgumentError, "Required argument 'body' missing" unless arguments[:body]
+ valid_params = [
+ :refresh,
+ :timeout,
+ :consistency,
+ :wait_for_completion,
+ :requests_per_second ]
+ method = 'POST'
+ path = "_reindex"
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = arguments[:body]
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/render_search_template.rb b/elasticsearch-api/lib/elasticsearch/api/actions/render_search_template.rb
new file mode 100644
index 0000000..edce15c
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/render_search_template.rb
@@ -0,0 +1,25 @@
+module Elasticsearch
+ module API
+ module Actions
+
+ # Pre-render search requests before they are executed and fill existing templates with template parameters
+ #
+ # @option arguments [String] :id The id of the stored search template
+ # @option arguments [Hash] :body The search definition template and its params
+ #
+ # @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/search-template.html
+ #
+ def render_search_template(arguments={})
+ valid_params = [
+ :id
+ ]
+ method = 'GET'
+ path = "_render/template"
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = arguments[:body]
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/search.rb b/elasticsearch-api/lib/elasticsearch/api/actions/search.rb
index c5540f2..8afacb8 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/search.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/search.rb
@@ -19,7 +19,7 @@ module Elasticsearch
# client.search index: 'myindex',
# body: {
# query: { match: { title: 'test' } },
- # facets: { tags: { terms: { field: 'tags' } } }
+ # aggregations: { tags: { terms: { field: 'tags' } } }
# }
#
# @example Paginating results: return 10 documents, beginning from the 10th
@@ -53,14 +53,14 @@ module Elasticsearch
# response = client.search index: 'myindex',
# body: {
# query: { match: { title: 'test' } },
- # facets: { tags: { terms: { field: 'tags' } } }
+ # aggregations: { tags: { terms: { field: 'tags' } } }
# }
#
# response = Hashie::Mash.new response
#
# response.hits.hits.first._source.title
#
- # response.facets.tags.terms.to_a.map { |f| "#{f.term} [#{f.count}]" }.join(', ')
+ # response.aggregations.tags.terms.to_a.map { |f| "#{f.term} [#{f.count}]" }.join(', ')
#
# @option arguments [List] :index A comma-separated list of index names to search; use `_all`
# or empty string to perform the operation on all indices
@@ -76,6 +76,8 @@ module Elasticsearch
# @option arguments [Boolean] :explain Specify whether to return detailed information about score computation
# as part of a hit
# @option arguments [List] :fields A comma-separated list of fields to return as part of a hit
+ # @option arguments [List] :fielddata_fields A comma-separated list of fields to return as the field data
+ # representation of a field for each hit
# @option arguments [Number] :from Starting offset (default: 0)
# @option arguments [String] :ignore_indices When performed on multiple indices, allows to ignore `missing` ones
# (options: none, missing)
@@ -85,6 +87,8 @@ module Elasticsearch
# @option arguments [String] :preference Specify the node or shard the operation should be performed on
# (default: random)
# @option arguments [String] :q Query in the Lucene query string syntax
+ # @option arguments [Boolean] :request_cache Specify if request cache should be used for this request
+ # (defaults to index level setting)
# @option arguments [List] :routing A comma-separated list of specific routing values
# @option arguments [Duration] :scroll Specify how long a consistent view of the index should be maintained
# for scrolled search
@@ -103,6 +107,7 @@ module Elasticsearch
# @option arguments [String] :suggest_mode Specify suggest mode (options: missing, popular, always)
# @option arguments [Number] :suggest_size How many suggestions to return in response
# @option arguments [Text] :suggest_text The source text for which the suggestions should be returned
+ # @option arguments [Number] :terminate_after The maximum number of documents to collect for each shard
# @option arguments [Time] :timeout Explicit operation timeout
# @option arguments [Boolean] :version Specify whether to return document version as part of a hit
#
@@ -120,6 +125,7 @@ module Elasticsearch
:default_operator,
:df,
:explain,
+ :fielddata_fields,
:fields,
:from,
:ignore_indices,
@@ -131,6 +137,7 @@ module Elasticsearch
:preference,
:q,
:query_cache,
+ :request_cache,
:routing,
:scroll,
:search_type,
@@ -145,6 +152,7 @@ module Elasticsearch
:suggest_mode,
:suggest_size,
:suggest_text,
+ :terminate_after,
:timeout,
:version ]
@@ -152,9 +160,14 @@ module Elasticsearch
path = Utils.__pathify( Utils.__listify(arguments[:index]), Utils.__listify(arguments[:type]), UNDERSCORE_SEARCH )
params = Utils.__validate_and_extract_params arguments, valid_params
+
body = arguments[:body]
- params[:fields] = Utils.__listify(params[:fields]) if params[:fields]
+ params[:fields] = Utils.__listify(params[:fields], :escape => false) if params[:fields]
+ params[:fielddata_fields] = Utils.__listify(params[:fielddata_fields], :escape => false) if params[:fielddata_fields]
+
+ # FIX: Unescape the `filter_path` parameter due to __listify default behavior. Investigate.
+ params[:filter_path] = defined?(EscapeUtils) ? EscapeUtils.unescape_url(params[:filter_path]) : CGI.unescape(params[:filter_path]) if params[:filter_path]
perform_request(method, path, params, body).body
end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/delete.rb b/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/delete.rb
index b1cd18f..28cc9e4 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/delete.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/delete.rb
@@ -14,6 +14,7 @@ module Elasticsearch
# @option arguments [String] :repository A repository name (*Required*)
# @option arguments [String] :snapshot A snapshot name (*Required*)
# @option arguments [Time] :master_timeout Explicit operation timeout for connection to master node
+ # @option arguments [Number,List] :ignore The list of HTTP errors to ignore
#
# @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/modules-snapshots.html
#
@@ -33,7 +34,11 @@ module Elasticsearch
params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
- perform_request(method, path, params, body).body
+ if Array(arguments[:ignore]).include?(404)
+ Utils.__rescue_from_not_found { perform_request(method, path, params, body).body }
+ else
+ perform_request(method, path, params, body).body
+ end
end
end
end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/delete_repository.rb b/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/delete_repository.rb
index 0a74328..46dcd10 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/delete_repository.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/delete_repository.rb
@@ -12,6 +12,7 @@ module Elasticsearch
# @option arguments [List] :repository A comma-separated list of repository names (*Required*)
# @option arguments [Time] :master_timeout Explicit operation timeout for connection to master node
# @option arguments [Time] :timeout Explicit operation timeout
+ # @option arguments [Number,List] :ignore The list of HTTP errors to ignore
#
# @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/modules-snapshots.html
#
@@ -30,7 +31,11 @@ module Elasticsearch
params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
- perform_request(method, path, params, body).body
+ if Array(arguments[:ignore]).include?(404)
+ Utils.__rescue_from_not_found { perform_request(method, path, params, body).body }
+ else
+ perform_request(method, path, params, body).body
+ end
end
end
end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/get.rb b/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/get.rb
index 10bc4e4..adbcab8 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/get.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/get.rb
@@ -20,6 +20,7 @@ module Elasticsearch
# @option arguments [String] :repository A repository name (*Required*)
# @option arguments [List] :snapshot A comma-separated list of snapshot names (*Required*)
# @option arguments [Time] :master_timeout Explicit operation timeout for connection to master node
+ # @option arguments [Number,List] :ignore The list of HTTP errors to ignore
#
# @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/modules-snapshots.html
#
@@ -39,7 +40,11 @@ module Elasticsearch
params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
- perform_request(method, path, params, body).body
+ if Array(arguments[:ignore]).include?(404)
+ Utils.__rescue_from_not_found { perform_request(method, path, params, body).body }
+ else
+ perform_request(method, path, params, body).body
+ end
end
end
end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/get_repository.rb b/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/get_repository.rb
index 2623702..693a22d 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/get_repository.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/get_repository.rb
@@ -18,6 +18,7 @@ module Elasticsearch
# @option arguments [Time] :master_timeout Explicit operation timeout for connection to master node
# @option arguments [Boolean] :local Return local information, do not retrieve the state from master node
# (default: false)
+ # @option arguments [Number,List] :ignore The list of HTTP errors to ignore
#
# @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/modules-snapshots.html
#
@@ -34,7 +35,11 @@ module Elasticsearch
params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
- perform_request(method, path, params, body).body
+ if Array(arguments[:ignore]).include?(404)
+ Utils.__rescue_from_not_found { perform_request(method, path, params, body).body }
+ else
+ perform_request(method, path, params, body).body
+ end
end
end
end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/status.rb b/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/status.rb
index 35bc1da..45d2b61 100644
--- a/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/status.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/snapshot/status.rb
@@ -16,6 +16,7 @@ module Elasticsearch
# @option arguments [String] :repository A repository name
# @option arguments [List] :snapshot A comma-separated list of snapshot names
# @option arguments [Time] :master_timeout Explicit operation timeout for connection to master node
+ # @option arguments [Number,List] :ignore The list of HTTP errors to ignore
#
# @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/modules-snapshots.html#_snapshot_status
#
@@ -32,7 +33,11 @@ module Elasticsearch
params = Utils.__validate_and_extract_params arguments, valid_params
body = nil
- perform_request(method, path, params, body).body
+ if Array(arguments[:ignore]).include?(404)
+ Utils.__rescue_from_not_found { perform_request(method, path, params, body).body }
+ else
+ perform_request(method, path, params, body).body
+ end
end
end
end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/tasks/cancel.rb b/elasticsearch-api/lib/elasticsearch/api/actions/tasks/cancel.rb
new file mode 100644
index 0000000..0e4122e
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/tasks/cancel.rb
@@ -0,0 +1,40 @@
+module Elasticsearch
+ module API
+ module Tasks
+ module Actions
+
+ # Cancel a specific task
+ #
+ # @option arguments [Number] :task_id Cancel the task with specified id
+ # @option arguments [List] :node_id A comma-separated list of node IDs or names to limit the returned
+ # information; use `_local` to return information from the node
+ # you're connecting to, leave empty to get information from all nodes
+ # @option arguments [List] :actions A comma-separated list of actions that should be returned.
+ # Leave empty to return all.
+ # @option arguments [String] :parent_node Cancel tasks with specified parent node.
+ # @option arguments [Number] :parent_task Cancel tasks with specified parent task id.
+ # Set to -1 to cancel all.
+ #
+ # @see http://www.elastic.co/guide/en/elasticsearch/reference/master/tasks-cancel.html
+ #
+ def cancel(arguments={})
+ valid_params = [
+ :node_id,
+ :actions,
+ :parent_node,
+ :parent_task ]
+
+ task_id = arguments.delete(:task_id)
+
+ method = 'POST'
+ path = "_tasks"
+ path = Utils.__pathify( '_tasks', Utils.__escape(task_id), '_cancel' )
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = nil
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/tasks/list.rb b/elasticsearch-api/lib/elasticsearch/api/actions/tasks/list.rb
new file mode 100644
index 0000000..aa25a6d
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/tasks/list.rb
@@ -0,0 +1,50 @@
+module Elasticsearch
+ module API
+ module Tasks
+ module Actions
+
+ # Return the list of tasks
+ #
+ # @option arguments [Number] :task_id Return the task with specified id
+ # @option arguments [List] :node_id A comma-separated list of node IDs or names to limit the returned
+ # information; use `_local` to return information from the node
+ # you're connecting to, leave empty to get information from all nodes
+ # @option arguments [List] :actions A comma-separated list of actions that should be returned.
+ # Leave empty to return all.
+ # @option arguments [Boolean] :detailed Return detailed task information (default: false)
+ # @option arguments [String] :parent_node Return tasks with specified parent node.
+ # @option arguments [Number] :parent_task Return tasks with specified parent task id.
+ # Set to -1 to return all.
+ # @option arguments [String] :group_by Group tasks by nodes or parent/child relationships
+ # Options: nodes, parents
+ # @option arguments [Boolean] :wait_for_completion Wait for the matching tasks to complete (default: false)
+ #
+ # @see http://www.elastic.co/guide/en/elasticsearch/reference/master/tasks-list.html
+ #
+ def list(arguments={})
+ valid_params = [
+ :node_id,
+ :actions,
+ :detailed,
+ :parent_node,
+ :parent_task,
+ :group_by,
+ :wait_for_completion ]
+
+ unsupported_params = [ :group_by ]
+ Utils.__report_unsupported_parameters(arguments.keys, unsupported_params)
+
+ task_id = arguments.delete(:task_id)
+
+ method = 'GET'
+ path = "_tasks"
+ path = Utils.__pathify( '_tasks', Utils.__escape(task_id) )
+ params = Utils.__validate_and_extract_params arguments, valid_params
+ body = nil
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/update_by_query.rb b/elasticsearch-api/lib/elasticsearch/api/actions/update_by_query.rb
new file mode 100644
index 0000000..10d4b45
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/actions/update_by_query.rb
@@ -0,0 +1,131 @@
+module Elasticsearch
+ module API
+ module Actions
+
+ # Process every document matching a query, potentially updating it
+ #
+ # @example Update all documents in the index, eg. to pick up new mappings
+ #
+ # client.update_by_query index: 'articles'
+ #
+ # @example Update a property of documents matching a query in the index
+ #
+ # client.update_by_query index: 'article',
+ # body: {
+ # script: { inline: 'ctx._source.views += 1' },
+ # query: { match: { title: 'foo' } }
+ # }
+ #
+ # @option arguments [List] :index A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices (*Required*)
+ # @option arguments [List] :type A comma-separated list of document types to search; leave empty to perform the operation on all types
+ # @option arguments [Hash] :body The search definition using the Query DSL
+ # @option arguments [String] :analyzer The analyzer to use for the query string
+ # @option arguments [Boolean] :analyze_wildcard Specify whether wildcard and prefix queries should be analyzed (default: false)
+ # @option arguments [String] :default_operator The default operator for query string query (AND or OR) (options: AND, OR)
+ # @option arguments [String] :df The field to use as default where no field prefix is given in the query string
+ # @option arguments [Boolean] :explain Specify whether to return detailed information about score computation as part of a hit
+ # @option arguments [List] :fields A comma-separated list of fields to return as part of a hit
+ # @option arguments [List] :fielddata_fields A comma-separated list of fields to return as the field data representation of a field for each hit
+ # @option arguments [Number] :from Starting offset (default: 0)
+ # @option arguments [Boolean] :ignore_unavailable Whether specified concrete indices should be ignored when unavailable (missing or closed)
+ # @option arguments [Boolean] :allow_no_indices Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)
+ # @option arguments [String] :conflicts What to do when the reindex hits version conflicts? (options: abort, proceed)
+ # @option arguments [String] :expand_wildcards Whether to expand wildcard expression to concrete indices that are open, closed or both. (options: open, closed, none, all)
+ # @option arguments [Boolean] :lenient Specify whether format-based query failures (such as providing text to a numeric field) should be ignored
+ # @option arguments [Boolean] :lowercase_expanded_terms Specify whether query terms should be lowercased
+ # @option arguments [String] :pipeline Ingest pipeline to set on index requests made by this action. (default: none)
+ # @option arguments [String] :preference Specify the node or shard the operation should be performed on (default: random)
+ # @option arguments [String] :q Query in the Lucene query string syntax
+ # @option arguments [List] :routing A comma-separated list of specific routing values
+ # @option arguments [Duration] :scroll Specify how long a consistent view of the index should be maintained for scrolled search
+ # @option arguments [String] :search_type Search operation type (options: query_then_fetch, dfs_query_then_fetch)
+ # @option arguments [Time] :search_timeout Explicit timeout for each search request. Defaults to no timeout.
+ # @option arguments [Number] :size Number of hits to return (default: 10)
+ # @option arguments [List] :sort A comma-separated list of <field>:<direction> pairs
+ # @option arguments [List] :_source True or false to return the _source field or not, or a list of fields to return
+ # @option arguments [List] :_source_exclude A list of fields to exclude from the returned _source field
+ # @option arguments [List] :_source_include A list of fields to extract and return from the _source field
+ # @option arguments [Number] :terminate_after The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early.
+ # @option arguments [List] :stats Specific 'tag' of the request for logging and statistical purposes
+ # @option arguments [String] :suggest_field Specify which field to use for suggestions
+ # @option arguments [String] :suggest_mode Specify suggest mode (options: missing, popular, always)
+ # @option arguments [Number] :suggest_size How many suggestions to return in response
+ # @option arguments [Text] :suggest_text The source text for which the suggestions should be returned
+ # @option arguments [Time] :timeout Time each individual bulk request should wait for shards that are unavailable.
+ # @option arguments [Boolean] :track_scores Whether to calculate and return scores even if they are not used for sorting
+ # @option arguments [Boolean] :version Specify whether to return document version as part of a hit
+ # @option arguments [Boolean] :version_type Should the document increment the version number (internal) on hit or not (reindex)
+ # @option arguments [Boolean] :request_cache Specify if request cache should be used for this request or not, defaults to index level setting
+ # @option arguments [Boolean] :refresh Should the affected indexes be refreshed?
+ # @option arguments [String] :consistency Explicit write consistency setting for the operation (options: one, quorum, all)
+ # @option arguments [Integer] :scroll_size Size on the scroll request powering the update_by_query
+ # @option arguments [Boolean] :wait_for_completion Should the request block until the reindex is complete.
+ # @option arguments [Float] :requests_per_second The throttle for this request in sub-requests per second. 0 means set no throttle.
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-update-by-query.html
+ #
+ def update_by_query(arguments={})
+ raise ArgumentError, "Required argument 'index' missing" unless arguments[:index]
+
+ valid_params = [
+ :analyzer,
+ :analyze_wildcard,
+ :default_operator,
+ :df,
+ :explain,
+ :fields,
+ :fielddata_fields,
+ :from,
+ :ignore_unavailable,
+ :allow_no_indices,
+ :conflicts,
+ :expand_wildcards,
+ :lenient,
+ :lowercase_expanded_terms,
+ :pipeline,
+ :preference,
+ :q,
+ :routing,
+ :scroll,
+ :search_type,
+ :search_timeout,
+ :size,
+ :sort,
+ :_source,
+ :_source_exclude,
+ :_source_include,
+ :terminate_after,
+ :stats,
+ :suggest_field,
+ :suggest_mode,
+ :suggest_size,
+ :suggest_text,
+ :timeout,
+ :track_scores,
+ :version,
+ :version_type,
+ :request_cache,
+ :refresh,
+ :consistency,
+ :scroll_size,
+ :wait_for_completion,
+ :requests_per_second ]
+
+ unsupported_params = [ :pipeline ]
+ Utils.__report_unsupported_parameters(arguments, unsupported_params)
+
+ method = HTTP_POST
+
+ path = Utils.__pathify Utils.__listify(arguments[:index]),
+ Utils.__listify(arguments[:type]),
+ '/_update_by_query'
+
+ params = Utils.__validate_and_extract_params arguments, valid_params
+
+ body = arguments[:body]
+
+ perform_request(method, path, params, body).body
+ end
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/namespace/ingest.rb b/elasticsearch-api/lib/elasticsearch/api/namespace/ingest.rb
new file mode 100644
index 0000000..d89c9f2
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/namespace/ingest.rb
@@ -0,0 +1,20 @@
+module Elasticsearch
+ module API
+ module Ingest
+ module Actions; end
+
+ # Client for the "ingest" namespace (includes the {Ingest::Actions} methods)
+ #
+ class IngestClient
+ include Common::Client, Common::Client::Base, Ingest::Actions
+ end
+
+ # Proxy method for {IngestClient}, available in the receiving object
+ #
+ def ingest
+ @ingest ||= IngestClient.new(self)
+ end
+
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/namespace/tasks.rb b/elasticsearch-api/lib/elasticsearch/api/namespace/tasks.rb
new file mode 100644
index 0000000..2543cb5
--- /dev/null
+++ b/elasticsearch-api/lib/elasticsearch/api/namespace/tasks.rb
@@ -0,0 +1,20 @@
+module Elasticsearch
+ module API
+ module Tasks
+ module Actions; end
+
+ # Client for the "tasks" namespace (includes the {Tasks::Actions} methods)
+ #
+ class TasksClient
+ include Common::Client, Common::Client::Base, Tasks::Actions
+ end
+
+ # Proxy method for {TasksClient}, available in the receiving object
+ #
+ def tasks
+ @tasks ||= TasksClient.new(self)
+ end
+
+ end
+ end
+end
diff --git a/elasticsearch-api/lib/elasticsearch/api/utils.rb b/elasticsearch-api/lib/elasticsearch/api/utils.rb
index d0183bd..5b4418e 100644
--- a/elasticsearch-api/lib/elasticsearch/api/utils.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/utils.rb
@@ -28,13 +28,18 @@ module Elasticsearch
# @example Escape values
# __listify('foo','bar^bam') # => 'foo,bar%5Ebam'
#
+ # @example Do not escape the values
+ # __listify('foo','bar^bam', escape: false) # => 'foo,bar^bam'
+ #
# @api private
def __listify(*list)
+ options = list.last.is_a?(Hash) ? list.pop : {}
+
Array(list).flatten.
map { |e| e.respond_to?(:split) ? e.split(',') : e }.
flatten.
compact.
- map { |e| __escape(e) }.
+ map { |e| options[:escape] == false ? e : __escape(e) }.
join(',')
end
@@ -60,6 +65,10 @@ module Elasticsearch
# Convert an array of payloads into Elasticsearch `header\ndata` format
#
+ # Supports various different formats of the payload: Array of Strings, Header/Data pairs,
+ # or the convenient "combined" format where data is passed along with the header
+ # in a single item.
+ #
# Elasticsearch::API::Utils.__bulkify [
# { :index => { :_index => 'myindexA', :_type => 'mytype', :_id => '1', :data => { :title => 'Test' } } },
# { :update => { :_index => 'myindexB', :_type => 'mytype', :_id => '2', :data => { :doc => { :title => 'Update' } } } }
@@ -71,22 +80,24 @@ module Elasticsearch
# # => {"doc":{"title":"Update"}}
#
def __bulkify(payload)
+ operations = %w[index create delete update]
+
case
+
# Hashes with `:data`
- when payload.any? { |d| d.is_a?(Hash) && d.values.first.is_a?(Hash) && (d.values.first[:data] || d.values.first['data']) }
+ when payload.any? { |d| d.is_a?(Hash) && d.values.first.is_a?(Hash) && operations.include?(d.keys.first.to_s) && (d.values.first[:data] || d.values.first['data']) }
payload = payload.
- inject([]) do |sum, item|
- operation, meta = item.to_a.first
- meta = meta.clone
- data = meta.delete(:data) || meta.delete('data')
-
- sum << { operation => meta }
- sum << data if data
- sum
- end.
- map { |item| MultiJson.dump(item) }
- payload << "" unless payload.empty?
- return payload.join("\n")
+ inject([]) do |sum, item|
+ operation, meta = item.to_a.first
+ meta = meta.clone
+ data = meta.delete(:data) || meta.delete('data')
+
+ sum << { operation => meta }
+ sum << data if data
+ sum
+ end.
+ map { |item| Elasticsearch::API.serializer.dump(item) }
+ payload << '' unless payload.empty?
# Array of strings
when payload.all? { |d| d.is_a? String }
@@ -94,7 +105,7 @@ module Elasticsearch
# Header/Data pairs
else
- payload = payload.map { |item| MultiJson.dump(item) }
+ payload = payload.map { |item| Elasticsearch::API.serializer.dump(item) }
payload << ''
end
@@ -133,7 +144,7 @@ module Elasticsearch
arguments
else
__validate_params(arguments, params)
- __extract_params(arguments, params)
+ __extract_params(arguments, params, options.merge(:escape => false))
end
end
@@ -144,10 +155,10 @@ module Elasticsearch
end
end
- def __extract_params(arguments, params=[])
+ def __extract_params(arguments, params=[], options={})
result = arguments.select { |k,v| COMMON_QUERY_PARAMS.include?(k) || params.include?(k) }
result = Hash[result] unless result.is_a?(Hash) # Normalize Ruby 1.8 and Ruby 1.9 Hash#select behaviour
- result = Hash[result.map { |k,v| v.is_a?(Array) ? [k, __listify(v)] : [k,v] }] # Listify Arrays
+ result = Hash[result.map { |k,v| v.is_a?(Array) ? [k, __listify(v, options)] : [k,v] }] # Listify Arrays
result
end
@@ -168,9 +179,6 @@ module Elasticsearch
# @api private
#
def __extract_parts(arguments, valid_parts=[])
- # require 'pry'; binding.pry;
- # parts = arguments.keys.select { |a| valid_parts.include?(a) }.map { |a| a.to_s }.sort
-
parts = Hash[arguments.select { |k,v| valid_parts.include?(k) }]
parts = parts.reduce([]) { |sum, item| k, v = item; v.is_a?(TrueClass) ? sum << k.to_s : sum << v }
@@ -195,6 +203,61 @@ module Elasticsearch
end
end
+ def __report_unsupported_parameters(arguments, params=[])
+ messages = []
+ unsupported_params = params.select {|d| d.is_a?(Hash) ? arguments.include?(d.keys.first) : arguments.include?(d) }
+
+ unsupported_params.each do |param|
+ name = case param
+ when Symbol
+ param
+ when Hash
+ param.keys.first
+ else
+ raise ArgumentError, "The param must be a Symbol or a Hash"
+ end
+
+ explanation = if param.is_a?(Hash)
+ ". #{param.values.first[:explanation]}."
+ else
+ ". This parameter is not supported in the version you're using: #{Elasticsearch::API::VERSION}, and will be removed in the next release."
+ end
+
+ message = "[!] You are using unsupported parameter [:#{name}]"
+
+ if source = caller && caller.last
+ message += " in `#{source}`"
+ end
+
+ message += explanation
+
+ messages << message
+ end
+
+ unless messages.empty?
+ if terminal = STDERR.tty?
+ STDERR.puts messages.map { |m| "\e[31;1m#{m}\e[0m" }.join("\n")
+ else
+ STDERR.puts messages.join("\n")
+ end
+ end
+ end
+
+ def __report_unsupported_method(name)
+ message = "[!] You are using unsupported method [#{name}]"
+ if source = caller && caller.last
+ message += " in `#{source}`"
+ end
+
+ message += ". This method is not supported in the version you're using: #{Elasticsearch::API::VERSION}, and will be removed in the next release."
+
+ if terminal = STDERR.tty?
+ STDERR.puts "\e[31;1m#{message}\e[0m"
+ else
+ STDERR.puts message
+ end
+ end
+
extend self
end
end
diff --git a/elasticsearch-api/lib/elasticsearch/api/version.rb b/elasticsearch-api/lib/elasticsearch/api/version.rb
index 2a39bf4..5bd0814 100644
--- a/elasticsearch-api/lib/elasticsearch/api/version.rb
+++ b/elasticsearch-api/lib/elasticsearch/api/version.rb
@@ -1,5 +1,5 @@
module Elasticsearch
module API
- VERSION = "1.0.12"
+ VERSION = "2.0.0"
end
end
diff --git a/elasticsearch-api/test/integration/yaml_test_runner.rb b/elasticsearch-api/test/integration/yaml_test_runner.rb
index 7176c14..ba3277f 100644
--- a/elasticsearch-api/test/integration/yaml_test_runner.rb
+++ b/elasticsearch-api/test/integration/yaml_test_runner.rb
@@ -14,7 +14,7 @@ require 'elasticsearch/extensions/test/startup_shutdown'
require 'elasticsearch/extensions/test/profiling' unless JRUBY
# Skip features
-skip_features = 'stash_in_path,requires_replica'
+skip_features = 'stash_in_path,requires_replica,headers'
SKIP_FEATURES = ENV.fetch('TEST_SKIP_FEATURES', skip_features)
# Turn configuration
@@ -23,12 +23,15 @@ Turn.config.format = :pretty
# Launch test cluster
#
-Elasticsearch::Extensions::Test::Cluster.start(nodes: 1) if ENV['SERVER'] and not Elasticsearch::Extensions::Test::Cluster.running?
+if ENV['SERVER'] and not Elasticsearch::Extensions::Test::Cluster.running?
+ es_params = "-D es.repositories.url.allowed_urls=http://snapshot.test* -D es.path.repo=/tmp -D es.node.testattr=test " + ENV['TEST_CLUSTER_PARAMS'].to_s
+ Elasticsearch::Extensions::Test::Cluster.start(nodes: 1, es_params: es_params )
+end
# Register `at_exit` handler for server shutdown.
# MUST be called before requiring `test/unit`.
#
-at_exit { Elasticsearch::Extensions::Test::Cluster.stop if ENV['SERVER'] }
+at_exit { Elasticsearch::Extensions::Test::Cluster.stop if ENV['SERVER'] and Elasticsearch::Extensions::Test::Cluster.running? }
class String
# Reset the `ansi` method on CI
@@ -80,7 +83,7 @@ url = ENV.fetch('TEST_CLUSTER_URL', "http://localhost:#{ENV['TEST_CLUSTER_PORT']
$client ||= Elasticsearch::Client.new url: url
$client.transport.logger = logger unless ENV['QUIET'] || ENV['CI']
-$client.transport.tracer = tracer if ENV['CI']
+# $client.transport.tracer = tracer if ENV['CI']
# Store Elasticsearch version
#
@@ -128,6 +131,7 @@ end
module Elasticsearch
module YamlTestSuite
+ $last_response = ''
$results = {}
$stash = {}
@@ -180,7 +184,7 @@ module Elasticsearch
$stderr.puts "ARGUMENTS: #{arguments.inspect}" if ENV['DEBUG']
- $results[test.hash] = namespace.reduce($client) do |memo, current|
+ $last_response = namespace.reduce($client) do |memo, current|
unless current == namespace.last
memo = memo.send(current)
else
@@ -188,12 +192,18 @@ module Elasticsearch
end
memo
end
+
+ $results[test.hash] = $last_response
end
- def evaluate(test, property)
- property.gsub(/\\\./, '_____').split('.').reduce($results[test.hash]) do |memo, attr|
+ def evaluate(test, property, response=nil)
+ response ||= $results[test.hash]
+ property.gsub(/\\\./, '_____').split('.').reduce(response) do |memo, attr|
if memo
- attr = attr.gsub(/_____/, '.') if attr
+ if attr
+ attr = attr.gsub(/_____/, '.')
+ attr = $stash[attr] if attr.start_with? '$'
+ end
memo = memo.is_a?(Hash) ? memo[attr] : memo[attr.to_i]
end
memo
@@ -265,8 +275,9 @@ end
include Elasticsearch::YamlTestSuite
-PATH = Pathname(ENV['TEST_REST_API_SPEC'] || \
- File.expand_path('../../../../tmp/elasticsearch/rest-api-spec/test', __FILE__))
+rest_api_test_source = $client.info['version']['number'] < '2' ? '../../../../tmp/elasticsearch/rest-api-spec/test' : '../../../../tmp/elasticsearch/rest-api-spec/src/main/resources/rest-api-spec/test'
+PATH = Pathname(ENV.fetch('TEST_REST_API_SPEC', File.expand_path(rest_api_test_source, __FILE__)))
+
suites = Dir.glob(PATH.join('*')).map { |d| Pathname(d) }
suites = suites.select { |s| s.to_s =~ Regexp.new(ENV['FILTER']) } if ENV['FILTER']
@@ -278,8 +289,23 @@ suites.each do |suite|
# --- Register context setup -------------------------------------------
#
setup do
- $client.indices.delete index: '_all'
- $client.indices.delete_template name: '*'
+ $client.indices.delete index: '_all', ignore: 404
+ $client.indices.delete_template name: '*', ignore: 404
+ $client.snapshot.delete repository: 'test_repo_create_1', snapshot: 'test_snapshot', ignore: 404
+ $client.snapshot.delete repository: 'test_repo_restore_1', snapshot: 'test_snapshot', ignore: 404
+ $client.snapshot.delete repository: 'test_cat_snapshots_1', snapshot: 'snap1', ignore: 404
+ $client.snapshot.delete repository: 'test_cat_snapshots_1', snapshot: 'snap2', ignore: 404
+ $client.snapshot.delete_repository repository: 'test_repo_create_1', ignore: 404
+ $client.snapshot.delete_repository repository: 'test_repo_restore_1', ignore: 404
+ $client.snapshot.delete_repository repository: 'test_repo_get_1', ignore: 404
+ $client.snapshot.delete_repository repository: 'test_repo_get_2', ignore: 404
+ $client.snapshot.delete_repository repository: 'test_cat_repo_1', ignore: 404
+ $client.snapshot.delete_repository repository: 'test_cat_repo_2', ignore: 404
+ $client.snapshot.delete_repository repository: 'test_cat_snapshots_1', ignore: 404
+ # FIXME: This shouldn't be needed -------------
+ FileUtils.rm_rf('/tmp/test_repo_create_1_loc')
+ FileUtils.rm_rf('/tmp/test_repo_restore_1_loc')
+ # ---------------------------------------------
$results = {}
$stash = {}
end
@@ -287,7 +313,7 @@ suites.each do |suite|
# --- Register context teardown ----------------------------------------
#
teardown do
- $client.indices.delete index: '_all'
+ $client.indices.delete index: '_all', ignore: 404
end
files = Dir[suite.join('*.{yml,yaml}')]
@@ -322,10 +348,18 @@ suites.each do |suite|
# --- Register test setup -------------------------------------------
setup do
actions.select { |a| a['setup'] }.first['setup'].each do |action|
- next unless action['do']
- api, arguments = action['do'].to_a.first
- arguments = Utils.symbolize_keys(arguments)
- Runner.perform_api_call((test.to_s + '___setup'), api, arguments)
+ if action['do']
+ api, arguments = action['do'].to_a.first
+ arguments = Utils.symbolize_keys(arguments)
+ Runner.perform_api_call((test.to_s + '___setup'), api, arguments)
+ end
+ if action['set']
+ stash = action['set']
+ property, variable = stash.to_a.first
+ result = Runner.evaluate(test, property, $last_response)
+ $stderr.puts "STASH: '$#{variable}' => #{result.inspect}" if ENV['DEBUG']
+ Runner.set variable, result
+ end
end
end
@@ -386,8 +420,10 @@ suites.each do |suite|
when property = action['is_false']
result = Runner.evaluate(test, property)
- $stderr.puts "CHECK: Expected '#{property}' to be false, is: #{result.inspect}" if ENV['DEBUG']
- assert( !!! result, "Property '#{property}' should be false, is: #{result.inspect}")
+ $stderr.puts "CHECK: Expected '#{property}' to be nil, false, 0 or empty string, is: #{result.inspect}" if ENV['DEBUG']
+ assert_block "Property '#{property}' should be nil, false, 0 or empty string, but is: #{result.inspect}" do
+ result.nil? || result == false || result == 0 || result == ''
+ end
when a = action['match']
property, value = a.to_a.first
diff --git a/elasticsearch-api/test/unit/api_test.rb b/elasticsearch-api/test/unit/api_test.rb
index a4ce142..989b2db 100644
--- a/elasticsearch-api/test/unit/api_test.rb
+++ b/elasticsearch-api/test/unit/api_test.rb
@@ -17,6 +17,10 @@ module Elasticsearch
assert_equal 'bar', Elasticsearch::API.settings[:foo]
end
+ should "have default serializer" do
+ assert_equal MultiJson, Elasticsearch::API.serializer
+ end
+
end
end
diff --git a/elasticsearch-api/test/unit/bulk_test.rb b/elasticsearch-api/test/unit/bulk_test.rb
index 46228e0..f39936d 100644
--- a/elasticsearch-api/test/unit/bulk_test.rb
+++ b/elasticsearch-api/test/unit/bulk_test.rb
@@ -16,7 +16,7 @@ module Elasticsearch
if RUBY_1_8
lines = body.split("\n")
- assert_equal 5, lines.size
+ assert_equal 7, lines.size
assert_match /\{"index"\:\{/, lines[0]
assert_match /\{"title"\:"Test"/, lines[1]
assert_match /\{"update"\:\{/, lines[2]
@@ -28,6 +28,8 @@ module Elasticsearch
{"update":{"_index":"myindexB","_type":"mytype","_id":"2"}}
{"doc":{"title":"Update"}}
{"delete":{"_index":"myindexC","_type":"mytypeC","_id":"3"}}
+ {"index":{"_index":"myindexD","_type":"mytype","_id":"1"}}
+ {"data":"MYDATA"}
PAYLOAD
end
true
@@ -36,7 +38,8 @@ module Elasticsearch
subject.bulk :body => [
{ :index => { :_index => 'myindexA', :_type => 'mytype', :_id => '1', :data => { :title => 'Test' } } },
{ :update => { :_index => 'myindexB', :_type => 'mytype', :_id => '2', :data => { :doc => { :title => 'Update' } } } },
- { :delete => { :_index => 'myindexC', :_type => 'mytypeC', :_id => '3' } }
+ { :delete => { :_index => 'myindexC', :_type => 'mytypeC', :_id => '3' } },
+ { :index => { :_index => 'myindexD', :_type => 'mytype', :_id => '1', :data => { :data => 'MYDATA' } } },
]
end
@@ -50,6 +53,32 @@ module Elasticsearch
subject.bulk :index => 'myindex', :body => []
end
+ should "handle `:data` keys correctly in header/data payloads" do
+ subject.expects(:perform_request).with do |method, url, params, body|
+ lines = body.split("\n")
+ assert_equal 2, lines.size
+
+ header = MultiJson.load(lines.first)
+ data = MultiJson.load(lines.last)
+
+ assert_equal 'myindex', header['update']['_index']
+ assert_equal 'mytype', header['update']['_type']
+ assert_equal '1', header['update']['_id']
+
+ assert_equal({'data' => { 'title' => 'Update' }}, data['doc'])
+ # assert_equal <<-PAYLOAD.gsub(/^\s+/, ''), body
+ # {"update":{"_index":"myindex","_type":"mytype","_id":"1"}}
+ # {"doc":{"data":{"title":"Update"}}}
+ # PAYLOAD
+ true
+ end.returns(FakeResponse.new)
+
+ subject.bulk :body => [
+ { :update => { :_index => 'myindex', :_type => 'mytype', :_id => '1' } },
+ { :doc => { :data => { :title => 'Update' } } }
+ ]
+ end
+
should "post a string payload" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal "foo\nbar", body
@@ -87,6 +116,16 @@ module Elasticsearch
subject.bulk :index => 'foo^bar', :body => []
end
+ should "not duplicate the type" do
+ subject.expects(:perform_request).with do |method, url, params, body|
+ assert_equal 'myindex/mytype/_bulk', url
+ assert_empty params
+ true
+ end.returns(FakeResponse.new)
+
+ subject.bulk :index => 'myindex', :type => 'mytype', :body => []
+ end
+
end
end
diff --git a/elasticsearch-api/test/unit/cat/plugins_test.rb b/elasticsearch-api/test/unit/cat/nodeattrs_test.rb
similarity index 72%
copy from elasticsearch-api/test/unit/cat/plugins_test.rb
copy to elasticsearch-api/test/unit/cat/nodeattrs_test.rb
index d81391d..98aa358 100644
--- a/elasticsearch-api/test/unit/cat/plugins_test.rb
+++ b/elasticsearch-api/test/unit/cat/nodeattrs_test.rb
@@ -2,21 +2,21 @@ require 'test_helper'
module Elasticsearch
module Test
- class CatPluginsTest < ::Test::Unit::TestCase
+ class CatNodeattrsTest < ::Test::Unit::TestCase
- context "Cat: Plugins" do
+ context "Cat: Nodeattrs" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '/_cat/plugins', url
+ assert_equal '_cat/nodeattrs', url
assert_equal Hash.new, params
assert_nil body
true
end.returns(FakeResponse.new)
- subject.cat.plugins
+ subject.cat.nodeattrs
end
end
diff --git a/elasticsearch-api/test/unit/cat/plugins_test.rb b/elasticsearch-api/test/unit/cat/plugins_test.rb
index d81391d..a2d5e2d 100644
--- a/elasticsearch-api/test/unit/cat/plugins_test.rb
+++ b/elasticsearch-api/test/unit/cat/plugins_test.rb
@@ -10,7 +10,7 @@ module Elasticsearch
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '/_cat/plugins', url
+ assert_equal '_cat/plugins', url
assert_equal Hash.new, params
assert_nil body
true
diff --git a/elasticsearch-api/test/unit/cat/plugins_test.rb b/elasticsearch-api/test/unit/cat/repositories_test.rb
similarity index 70%
copy from elasticsearch-api/test/unit/cat/plugins_test.rb
copy to elasticsearch-api/test/unit/cat/repositories_test.rb
index d81391d..682b8a4 100644
--- a/elasticsearch-api/test/unit/cat/plugins_test.rb
+++ b/elasticsearch-api/test/unit/cat/repositories_test.rb
@@ -2,21 +2,21 @@ require 'test_helper'
module Elasticsearch
module Test
- class CatPluginsTest < ::Test::Unit::TestCase
+ class CatRepositoriesTest < ::Test::Unit::TestCase
- context "Cat: Plugins" do
+ context "Cat: Repositories" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '/_cat/plugins', url
+ assert_equal '_cat/repositories', url
assert_equal Hash.new, params
assert_nil body
true
end.returns(FakeResponse.new)
- subject.cat.plugins
+ subject.cat.repositories
end
end
diff --git a/elasticsearch-api/test/unit/cat/plugins_test.rb b/elasticsearch-api/test/unit/cat/snapshots_test.rb
similarity index 69%
copy from elasticsearch-api/test/unit/cat/plugins_test.rb
copy to elasticsearch-api/test/unit/cat/snapshots_test.rb
index d81391d..4568e2e 100644
--- a/elasticsearch-api/test/unit/cat/plugins_test.rb
+++ b/elasticsearch-api/test/unit/cat/snapshots_test.rb
@@ -2,21 +2,21 @@ require 'test_helper'
module Elasticsearch
module Test
- class CatPluginsTest < ::Test::Unit::TestCase
+ class CatSnapshotsTest < ::Test::Unit::TestCase
- context "Cat: Plugins" do
+ context "Cat: Snapshots" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '/_cat/plugins', url
+ assert_equal '_cat/snapshots/foo', url
assert_equal Hash.new, params
assert_nil body
true
end.returns(FakeResponse.new)
- subject.cat.plugins
+ subject.cat.snapshots :repository => 'foo'
end
end
diff --git a/elasticsearch-api/test/unit/cat/plugins_test.rb b/elasticsearch-api/test/unit/cat/tasks_test.rb
similarity index 73%
copy from elasticsearch-api/test/unit/cat/plugins_test.rb
copy to elasticsearch-api/test/unit/cat/tasks_test.rb
index d81391d..dbcef02 100644
--- a/elasticsearch-api/test/unit/cat/plugins_test.rb
+++ b/elasticsearch-api/test/unit/cat/tasks_test.rb
@@ -2,21 +2,21 @@ require 'test_helper'
module Elasticsearch
module Test
- class CatPluginsTest < ::Test::Unit::TestCase
+ class CatTasksTest < ::Test::Unit::TestCase
- context "Cat: Plugins" do
+ context "Cat: Tasks" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '/_cat/plugins', url
+ assert_equal '_cat/tasks', url
assert_equal Hash.new, params
assert_nil body
true
end.returns(FakeResponse.new)
- subject.cat.plugins
+ subject.cat.tasks
end
end
diff --git a/elasticsearch-api/test/unit/cluster/pending_tasks_test.rb b/elasticsearch-api/test/unit/cluster/allocation_explain_test.rb
similarity index 61%
copy from elasticsearch-api/test/unit/cluster/pending_tasks_test.rb
copy to elasticsearch-api/test/unit/cluster/allocation_explain_test.rb
index 01b31c0..1d6f46a 100644
--- a/elasticsearch-api/test/unit/cluster/pending_tasks_test.rb
+++ b/elasticsearch-api/test/unit/cluster/allocation_explain_test.rb
@@ -2,21 +2,21 @@ require 'test_helper'
module Elasticsearch
module Test
- class ClusterPendingTasksTest < ::Test::Unit::TestCase
+ class ClusterAllocationExplainTest < ::Test::Unit::TestCase
- context "Cluster: Pending tasks" do
+ context "Cluster: Allocation explain" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '/_cluster/pending_tasks', url
+ assert_equal '_cluster/allocation/explain', url
assert_equal Hash.new, params
- assert_nil body
+ assert_equal nil, body
true
end.returns(FakeResponse.new)
- subject.cluster.pending_tasks
+ subject.cluster.allocation_explain
end
end
diff --git a/elasticsearch-api/test/unit/cluster/health_test.rb b/elasticsearch-api/test/unit/cluster/health_test.rb
index 2e5382d..0065b6d 100644
--- a/elasticsearch-api/test/unit/cluster/health_test.rb
+++ b/elasticsearch-api/test/unit/cluster/health_test.rb
@@ -31,6 +31,15 @@ module Elasticsearch
subject.cluster.health :level => 'indices'
end
+ should "return health for a specific index" do
+ subject.expects(:perform_request).with do |method, url, params, body|
+ assert_equal '_cluster/health/foo', url
+ true
+ end.returns(FakeResponse.new)
+
+ subject.cluster.health :index => 'foo'
+ end
+
end
end
diff --git a/elasticsearch-api/test/unit/cluster/pending_tasks_test.rb b/elasticsearch-api/test/unit/cluster/pending_tasks_test.rb
index 01b31c0..acef327 100644
--- a/elasticsearch-api/test/unit/cluster/pending_tasks_test.rb
+++ b/elasticsearch-api/test/unit/cluster/pending_tasks_test.rb
@@ -10,7 +10,7 @@ module Elasticsearch
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '/_cluster/pending_tasks', url
+ assert_equal '_cluster/pending_tasks', url
assert_equal Hash.new, params
assert_nil body
true
diff --git a/elasticsearch-api/test/unit/cat/plugins_test.rb b/elasticsearch-api/test/unit/cluster/stats_test.rb
similarity index 72%
copy from elasticsearch-api/test/unit/cat/plugins_test.rb
copy to elasticsearch-api/test/unit/cluster/stats_test.rb
index d81391d..9132324 100644
--- a/elasticsearch-api/test/unit/cat/plugins_test.rb
+++ b/elasticsearch-api/test/unit/cluster/stats_test.rb
@@ -2,21 +2,21 @@ require 'test_helper'
module Elasticsearch
module Test
- class CatPluginsTest < ::Test::Unit::TestCase
+ class ClusterStatsTest < ::Test::Unit::TestCase
- context "Cat: Plugins" do
+ context "Cluster: Stats" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '/_cat/plugins', url
+ assert_equal '_cluster/stats', url
assert_equal Hash.new, params
assert_nil body
true
end.returns(FakeResponse.new)
- subject.cat.plugins
+ subject.cluster.stats
end
end
diff --git a/elasticsearch-api/test/unit/indices/flush_synced_test.rb b/elasticsearch-api/test/unit/indices/flush_synced_test.rb
new file mode 100644
index 0000000..f15fa3b
--- /dev/null
+++ b/elasticsearch-api/test/unit/indices/flush_synced_test.rb
@@ -0,0 +1,41 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ class IndicesFlushSyncedTest < ::Test::Unit::TestCase
+
+ context "Indices: Flush synced" do
+ subject { FakeClient.new }
+
+ should "perform correct request" do
+ subject.expects(:perform_request).with do |method, url, params, body|
+ assert_equal 'POST', method
+ assert_equal 'foo/_flush/synced', url
+ assert_equal Hash.new, params
+ assert_nil body
+ true
+ end.returns(FakeResponse.new)
+
+ subject.indices.flush_synced :index => 'foo'
+ end
+
+ should "raise a NotFound exception" do
+ subject.expects(:perform_request).raises(NotFound)
+
+ assert_raise NotFound do
+ subject.indices.flush_synced :index => 'foo'
+ end
+ end
+
+ should "catch a NotFound exception with the ignore parameter" do
+ subject.expects(:perform_request).raises(NotFound)
+
+ assert_nothing_raised do
+ subject.indices.flush_synced :index => 'foo', :ignore => 404
+ end
+ end
+ end
+
+ end
+ end
+end
diff --git a/elasticsearch-api/test/unit/cat/plugins_test.rb b/elasticsearch-api/test/unit/indices/forcemerge_test.rb
similarity index 64%
copy from elasticsearch-api/test/unit/cat/plugins_test.rb
copy to elasticsearch-api/test/unit/indices/forcemerge_test.rb
index d81391d..b6a9e46 100644
--- a/elasticsearch-api/test/unit/cat/plugins_test.rb
+++ b/elasticsearch-api/test/unit/indices/forcemerge_test.rb
@@ -2,21 +2,21 @@ require 'test_helper'
module Elasticsearch
module Test
- class CatPluginsTest < ::Test::Unit::TestCase
+ class IndicesForcemergeTest < ::Test::Unit::TestCase
- context "Cat: Plugins" do
+ context "Indices: Forcemerge" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
- assert_equal 'GET', method
- assert_equal '/_cat/plugins', url
+ assert_equal 'POST', method
+ assert_equal '_forcemerge', url
assert_equal Hash.new, params
assert_nil body
true
end.returns(FakeResponse.new)
- subject.cat.plugins
+ subject.indices.forcemerge
end
end
diff --git a/elasticsearch-api/test/unit/cat/plugins_test.rb b/elasticsearch-api/test/unit/indices/shard_stores_test.rb
similarity index 70%
copy from elasticsearch-api/test/unit/cat/plugins_test.rb
copy to elasticsearch-api/test/unit/indices/shard_stores_test.rb
index d81391d..103a15e 100644
--- a/elasticsearch-api/test/unit/cat/plugins_test.rb
+++ b/elasticsearch-api/test/unit/indices/shard_stores_test.rb
@@ -2,21 +2,21 @@ require 'test_helper'
module Elasticsearch
module Test
- class CatPluginsTest < ::Test::Unit::TestCase
+ class IndicesShardStoresTest < ::Test::Unit::TestCase
- context "Cat: Plugins" do
+ context "Indices: Shard stores" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '/_cat/plugins', url
+ assert_equal '_shard_stores', url
assert_equal Hash.new, params
assert_nil body
true
end.returns(FakeResponse.new)
- subject.cat.plugins
+ subject.indices.shard_stores
end
end
diff --git a/elasticsearch-api/test/unit/indices/status_test.rb b/elasticsearch-api/test/unit/indices/status_test.rb
index 7295492..1b1b8b1 100644
--- a/elasticsearch-api/test/unit/indices/status_test.rb
+++ b/elasticsearch-api/test/unit/indices/status_test.rb
@@ -57,6 +57,14 @@ module Elasticsearch
subject.indices.status :index => 'foo^bar'
end
+ should "catch a NotFound exception with the ignore parameter" do
+ subject.expects(:perform_request).raises(NotFound)
+
+ assert_nothing_raised do
+ subject.indices.status :index => 'foo^bar', :ignore => 404
+ end
+ end
+
end
end
diff --git a/elasticsearch-api/test/unit/ingest/delete_pipeline_test.rb b/elasticsearch-api/test/unit/ingest/delete_pipeline_test.rb
new file mode 100644
index 0000000..8ca1cfe
--- /dev/null
+++ b/elasticsearch-api/test/unit/ingest/delete_pipeline_test.rb
@@ -0,0 +1,41 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ class IngestDeletePipelineTest < ::Test::Unit::TestCase
+
+ context "Ingest: Delete pipeline" do
+ subject { FakeClient.new }
+
+ should "require the :id argument" do
+ assert_raise ArgumentError do
+ subject.ingest.delete_pipeline
+ end
+ end
+
+ should "perform correct request" do
+ subject.expects(:perform_request).with do |method, url, params, body|
+ assert_equal 'DELETE', method
+ assert_equal '_ingest/pipeline/foo', url
+ assert_equal Hash.new, params
+ assert_nil body
+ true
+ end.returns(FakeResponse.new)
+
+ subject.ingest.delete_pipeline :id => 'foo'
+ end
+
+ should "URL-escape the ID" do
+ subject.expects(:perform_request).with do |method, url, params, body|
+ assert_equal '_ingest/pipeline/foo%5Ebar', url
+ true
+ end.returns(FakeResponse.new)
+
+ subject.ingest.delete_pipeline :id => 'foo^bar'
+ end
+
+ end
+
+ end
+ end
+end
diff --git a/elasticsearch-api/test/unit/cluster/health_test.rb b/elasticsearch-api/test/unit/ingest/get_pipeline_test.rb
similarity index 53%
copy from elasticsearch-api/test/unit/cluster/health_test.rb
copy to elasticsearch-api/test/unit/ingest/get_pipeline_test.rb
index 2e5382d..b779dd8 100644
--- a/elasticsearch-api/test/unit/cluster/health_test.rb
+++ b/elasticsearch-api/test/unit/ingest/get_pipeline_test.rb
@@ -2,33 +2,36 @@ require 'test_helper'
module Elasticsearch
module Test
- class Cluster_Test < ::Test::Unit::TestCase
+ class IngestGetPipelineTest < ::Test::Unit::TestCase
- context "Health" do
+ context "Ingest: Get pipeline" do
subject { FakeClient.new }
+ should "require the :id argument" do
+ assert_raise ArgumentError do
+ subject.ingest.get_pipeline
+ end
+ end
+
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '_cluster/health', url
+ assert_equal '_ingest/pipeline/foo', url
assert_equal Hash.new, params
assert_nil body
true
end.returns(FakeResponse.new)
- subject.cluster.health
+ subject.ingest.get_pipeline :id => 'foo'
end
- should "encode URL parameters" do
+ should "URL-escape the ID" do
subject.expects(:perform_request).with do |method, url, params, body|
- assert_equal 'GET', method
- assert_equal '_cluster/health', url
- assert_equal({:level => 'indices'}, params)
- assert_nil body
+ assert_equal '_ingest/pipeline/foo%5Ebar', url
true
end.returns(FakeResponse.new)
- subject.cluster.health :level => 'indices'
+ subject.ingest.get_pipeline :id => 'foo^bar'
end
end
diff --git a/elasticsearch-api/test/unit/ingest/put_pipeline_test.rb b/elasticsearch-api/test/unit/ingest/put_pipeline_test.rb
new file mode 100644
index 0000000..7102ddf
--- /dev/null
+++ b/elasticsearch-api/test/unit/ingest/put_pipeline_test.rb
@@ -0,0 +1,46 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ class IngestPutPipelineTest < ::Test::Unit::TestCase
+
+ context "Ingest: Put pipeline" do
+ subject { FakeClient.new }
+
+ should "require the :id argument" do
+ assert_raise ArgumentError do
+ subject.ingest.put_pipeline :body => {}
+ end
+ end
+
+ should "require the :body argument" do
+ assert_raise ArgumentError do
+ subject.ingest.put_pipeline :id => 'foo'
+ end
+ end
+
+ should "perform correct request" do
+ subject.expects(:perform_request).with do |method, url, params, body|
+ assert_equal 'PUT', method
+ assert_equal '_ingest/pipeline/foo', url
+ assert_equal Hash.new, params
+ assert_equal Hash.new, body
+ true
+ end.returns(FakeResponse.new)
+
+ subject.ingest.put_pipeline :id => 'foo', :body => {}
+ end
+
+ should "URL-escape the ID" do
+ subject.expects(:perform_request).with do |method, url, params, body|
+ assert_equal '_ingest/pipeline/foo%5Ebar', url
+ true
+ end.returns(FakeResponse.new)
+
+ subject.ingest.put_pipeline :id => 'foo^bar', :body => {}
+ end
+ end
+
+ end
+ end
+end
diff --git a/elasticsearch-api/test/unit/cluster/health_test.rb b/elasticsearch-api/test/unit/ingest/simulate_test.rb
similarity index 54%
copy from elasticsearch-api/test/unit/cluster/health_test.rb
copy to elasticsearch-api/test/unit/ingest/simulate_test.rb
index 2e5382d..851cf02 100644
--- a/elasticsearch-api/test/unit/cluster/health_test.rb
+++ b/elasticsearch-api/test/unit/ingest/simulate_test.rb
@@ -2,33 +2,30 @@ require 'test_helper'
module Elasticsearch
module Test
- class Cluster_Test < ::Test::Unit::TestCase
+ class IngestSimulateTest < ::Test::Unit::TestCase
- context "Health" do
+ context "Ingest: Simulate" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '_cluster/health', url
+ assert_equal '_ingest/pipeline/_simulate', url
assert_equal Hash.new, params
- assert_nil body
+ assert_equal Hash.new, body
true
end.returns(FakeResponse.new)
- subject.cluster.health
+ subject.ingest.simulate :body => {}
end
- should "encode URL parameters" do
+ should "perform correct request with a pipeline ID" do
subject.expects(:perform_request).with do |method, url, params, body|
- assert_equal 'GET', method
- assert_equal '_cluster/health', url
- assert_equal({:level => 'indices'}, params)
- assert_nil body
+ assert_equal '_ingest/pipeline/foo/_simulate', url
true
end.returns(FakeResponse.new)
- subject.cluster.health :level => 'indices'
+ subject.ingest.simulate :id => 'foo', :body => {}
end
end
diff --git a/elasticsearch-api/test/unit/percolate_test.rb b/elasticsearch-api/test/unit/percolate_test.rb
index dc68c68..4068c5c 100644
--- a/elasticsearch-api/test/unit/percolate_test.rb
+++ b/elasticsearch-api/test/unit/percolate_test.rb
@@ -40,6 +40,15 @@ module Elasticsearch
subject.percolate :index => 'foo^bar', :type => 'bar/bam', :body => { :doc => { :foo => 'bar' } }
end
+ should "URL-escape the parts (including document id)" do
+ subject.expects(:perform_request).with do |method, url, params, body|
+ assert_equal 'foo%5Ebar/bar%2Fbam/some%2Fid/_percolate', url
+ true
+ end.returns(FakeResponse.new)
+
+ subject.percolate :index => 'foo^bar', :type => 'bar/bam', :id => 'some/id'
+ end
+
end
end
diff --git a/elasticsearch-api/test/unit/ping_test.rb b/elasticsearch-api/test/unit/ping_test.rb
index 8b8d60a..514ac65 100644
--- a/elasticsearch-api/test/unit/ping_test.rb
+++ b/elasticsearch-api/test/unit/ping_test.rb
@@ -4,7 +4,7 @@ module Elasticsearch
module Test
class PingTest < ::Test::Unit::TestCase
- context "Indices: Exists" do
+ context "Ping" do
subject { FakeClient.new }
should "perform correct request" do
@@ -34,6 +34,11 @@ module Elasticsearch
assert_equal false, subject.ping
end
+ should "return false on 'connection failed' exceptions" do
+ subject.expects(:perform_request).raises(StandardError.new 'ConnectionFailed')
+ assert_equal false, subject.ping
+ end
+
should "re-raise generic exceptions" do
subject.expects(:perform_request).raises(StandardError)
assert_raise(StandardError) do
diff --git a/elasticsearch-api/test/unit/cat/plugins_test.rb b/elasticsearch-api/test/unit/reindex_test.rb
similarity index 60%
copy from elasticsearch-api/test/unit/cat/plugins_test.rb
copy to elasticsearch-api/test/unit/reindex_test.rb
index d81391d..6aeef5b 100644
--- a/elasticsearch-api/test/unit/cat/plugins_test.rb
+++ b/elasticsearch-api/test/unit/reindex_test.rb
@@ -2,21 +2,21 @@ require 'test_helper'
module Elasticsearch
module Test
- class CatPluginsTest < ::Test::Unit::TestCase
+ class ReindexTest < ::Test::Unit::TestCase
- context "Cat: Plugins" do
+ context "Reindex" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
- assert_equal 'GET', method
- assert_equal '/_cat/plugins', url
+ assert_equal 'POST', method
+ assert_equal '_reindex', url
assert_equal Hash.new, params
- assert_nil body
+ assert_equal Hash.new, body
true
end.returns(FakeResponse.new)
- subject.cat.plugins
+ subject.reindex :body => {}
end
end
diff --git a/elasticsearch-api/test/unit/cat/plugins_test.rb b/elasticsearch-api/test/unit/render_search_template_test.rb
similarity index 51%
copy from elasticsearch-api/test/unit/cat/plugins_test.rb
copy to elasticsearch-api/test/unit/render_search_template_test.rb
index d81391d..bb496d3 100644
--- a/elasticsearch-api/test/unit/cat/plugins_test.rb
+++ b/elasticsearch-api/test/unit/render_search_template_test.rb
@@ -2,23 +2,22 @@ require 'test_helper'
module Elasticsearch
module Test
- class CatPluginsTest < ::Test::Unit::TestCase
+ class RenderSearchTemplateTest < ::Test::Unit::TestCase
- context "Cat: Plugins" do
+ context "Render search template" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '/_cat/plugins', url
- assert_equal Hash.new, params
- assert_nil body
+ assert_equal '_render/template', url
+ assert_equal({ :id => 'foo' }, params)
+ assert_equal({ :foo => 'bar' }, body)
true
end.returns(FakeResponse.new)
- subject.cat.plugins
+ subject.render_search_template :id => 'foo', :body => { :foo => 'bar' }
end
-
end
end
diff --git a/elasticsearch-api/test/unit/search_test.rb b/elasticsearch-api/test/unit/search_test.rb
index a65df64..a0239c2 100644
--- a/elasticsearch-api/test/unit/search_test.rb
+++ b/elasticsearch-api/test/unit/search_test.rb
@@ -101,6 +101,14 @@ module Elasticsearch
subject.search :index => 'foo^bar', :type => 'bar/bam'
end
+ should "not URL-escape the fields parameters" do
+ subject.expects(:perform_request).with do |method, url, params, body|
+ assert_equal 'foo^bar', params[:fields]
+ true
+ end.returns(FakeResponse.new)
+
+ subject.search :index => 'foo', :type => 'bar', :fields => 'foo^bar'
+ end
end
end
diff --git a/elasticsearch-api/test/unit/cluster/health_test.rb b/elasticsearch-api/test/unit/tasks/cancel_test.rb
similarity index 56%
copy from elasticsearch-api/test/unit/cluster/health_test.rb
copy to elasticsearch-api/test/unit/tasks/cancel_test.rb
index 2e5382d..408f10e 100644
--- a/elasticsearch-api/test/unit/cluster/health_test.rb
+++ b/elasticsearch-api/test/unit/tasks/cancel_test.rb
@@ -2,33 +2,33 @@ require 'test_helper'
module Elasticsearch
module Test
- class Cluster_Test < ::Test::Unit::TestCase
+ class TasksCancelTest < ::Test::Unit::TestCase
- context "Health" do
+ context "Tasks: Cancel" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
- assert_equal 'GET', method
- assert_equal '_cluster/health', url
+ assert_equal 'POST', method
+ assert_equal '_tasks/_cancel', url
assert_equal Hash.new, params
assert_nil body
true
end.returns(FakeResponse.new)
- subject.cluster.health
+ subject.tasks.cancel
end
- should "encode URL parameters" do
+ should "perform correct request with a task_id" do
subject.expects(:perform_request).with do |method, url, params, body|
- assert_equal 'GET', method
- assert_equal '_cluster/health', url
- assert_equal({:level => 'indices'}, params)
+ assert_equal 'POST', method
+ assert_equal '_tasks/foo/_cancel', url
+ assert_equal Hash.new, params
assert_nil body
true
end.returns(FakeResponse.new)
- subject.cluster.health :level => 'indices'
+ subject.tasks.cancel :task_id => 'foo'
end
end
diff --git a/elasticsearch-api/test/unit/cluster/health_test.rb b/elasticsearch-api/test/unit/tasks/list_test.rb
similarity index 64%
copy from elasticsearch-api/test/unit/cluster/health_test.rb
copy to elasticsearch-api/test/unit/tasks/list_test.rb
index 2e5382d..7ce1f86 100644
--- a/elasticsearch-api/test/unit/cluster/health_test.rb
+++ b/elasticsearch-api/test/unit/tasks/list_test.rb
@@ -2,33 +2,33 @@ require 'test_helper'
module Elasticsearch
module Test
- class Cluster_Test < ::Test::Unit::TestCase
+ class TasksListTest < ::Test::Unit::TestCase
- context "Health" do
+ context "Tasks: List" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '_cluster/health', url
+ assert_equal '_tasks', url
assert_equal Hash.new, params
assert_nil body
true
end.returns(FakeResponse.new)
- subject.cluster.health
+ subject.tasks.list
end
- should "encode URL parameters" do
+ should "perform correct request with :task_id" do
subject.expects(:perform_request).with do |method, url, params, body|
assert_equal 'GET', method
- assert_equal '_cluster/health', url
- assert_equal({:level => 'indices'}, params)
+ assert_equal '_tasks/foo', url
+ assert_equal Hash.new, params
assert_nil body
true
end.returns(FakeResponse.new)
- subject.cluster.health :level => 'indices'
+ subject.tasks.list :task_id => 'foo'
end
end
diff --git a/elasticsearch-api/test/unit/cluster/pending_tasks_test.rb b/elasticsearch-api/test/unit/update_by_query_test.rb
similarity index 57%
copy from elasticsearch-api/test/unit/cluster/pending_tasks_test.rb
copy to elasticsearch-api/test/unit/update_by_query_test.rb
index 01b31c0..2d355f7 100644
--- a/elasticsearch-api/test/unit/cluster/pending_tasks_test.rb
+++ b/elasticsearch-api/test/unit/update_by_query_test.rb
@@ -2,21 +2,21 @@ require 'test_helper'
module Elasticsearch
module Test
- class ClusterPendingTasksTest < ::Test::Unit::TestCase
+ class UpdateByQueryTest < ::Test::Unit::TestCase
- context "Cluster: Pending tasks" do
+ context "Update by query" do
subject { FakeClient.new }
should "perform correct request" do
subject.expects(:perform_request).with do |method, url, params, body|
- assert_equal 'GET', method
- assert_equal '/_cluster/pending_tasks', url
+ assert_equal 'POST', method
+ assert_equal 'foo/_update_by_query', url
assert_equal Hash.new, params
- assert_nil body
+ assert_equal nil, body
true
end.returns(FakeResponse.new)
- subject.cluster.pending_tasks
+ subject.update_by_query :index => 'foo'
end
end
diff --git a/elasticsearch-api/test/unit/utils_test.rb b/elasticsearch-api/test/unit/utils_test.rb
index fd3281b..5d96d84 100644
--- a/elasticsearch-api/test/unit/utils_test.rb
+++ b/elasticsearch-api/test/unit/utils_test.rb
@@ -61,6 +61,10 @@ module Elasticsearch
assert_equal 'foo,bar%5Ebam', __listify(['foo', 'bar^bam'])
end
+ should "not encode special characters when the :escape option is set" do
+ assert_equal 'foo,bar^bam', __listify(['foo', 'bar^bam'], :escape => false)
+ end
+
end
context "__pathify" do
@@ -128,8 +132,8 @@ module Elasticsearch
PAYLOAD
end
- should "not modify the original payload" do
- original = [ { :index => {:foo => 'bar', :data => { :moo => 'bam' }} } ]
+ should "not modify the original payload with the data option" do
+ original = [ { :index => {:foo => 'bar', :data => {:moo => 'bam'} } } ]
result = Elasticsearch::API::Utils.__bulkify original
assert_not_nil original.first[:index][:data], "Deleted :data from #{original}"
assert_equal <<-PAYLOAD.gsub(/^\s+/, ''), result
@@ -138,6 +142,23 @@ module Elasticsearch
PAYLOAD
end
+ should "not modify the original payload with meta/data pairs" do
+ original = [ { :index => {:foo => 'bar'} }, { :data => {:a => 'b', :data => {:c => 'd'} } } ]
+ result = Elasticsearch::API::Utils.__bulkify original
+
+ assert_not_nil original.last[:data], "Deleted :data from #{original}"
+ assert_not_nil original.last[:data][:data], "Deleted :data from #{original}"
+
+ lines = result.split("\n")
+ assert_equal 2, lines.size
+
+ header = MultiJson.load(lines.first)
+ data = MultiJson.load(lines.last)
+
+ assert_equal 'bar', header['index']['foo']
+ assert_equal 'b', data['data']['a']
+ assert_equal 'd', data['data']['data']['c']
+ end
end
context "__validate_and_extract_params" do
@@ -186,6 +207,11 @@ module Elasticsearch
result = __validate_and_extract_params( { :foo => ['a', 'b'] }, [:foo] )
assert_equal( { :foo => 'a,b'}, result )
end
+
+ should "not escape the parameters" do
+ result = __validate_and_extract_params( { :foo => ['a.*', 'b.*'] }, [:foo] )
+ assert_equal( { :foo => 'a.*,b.*'}, result )
+ end
end
context "__extract_parts" do
@@ -222,6 +248,65 @@ module Elasticsearch
end
+ context "__report_unsupported_parameters" do
+ should "print used unsupported parameters passed as Symbols" do
+ arguments = { :foo => 'bar', :moo => 'bam', :baz => 'qux' }
+ unsupported_params = [:foo, :moo]
+
+ STDERR.expects(:puts).with do |message|
+ assert_equal 2, message.split("\n").size
+ true
+ end
+
+ __report_unsupported_parameters(arguments, unsupported_params)
+ end
+
+ should "print used unsupported parameters passed as Hashes" do
+ arguments = { :foo => 'bar', :moo => 'bam', :baz => 'qux' }
+ unsupported_params = [ { :foo => { :explanation => 'NOT_SUPPORTED' } } ]
+
+ STDERR.expects(:puts).with do |message|
+ assert_match /NOT_SUPPORTED/, message
+ assert_equal 1, message.split("\n").size
+ true
+ end
+
+ __report_unsupported_parameters(arguments, unsupported_params)
+ end
+
+ should "print used unsupported parameters passed as a mix of Symbols and Hashes" do
+ arguments = { :foo => 'bar', :moo => 'bam', :baz => 'qux' }
+ unsupported_params = [ { :foo => { :explanation => 'NOT_SUPPORTED'} }, :moo ]
+
+ STDERR.expects(:puts).with do |message|
+ assert_match /NOT_SUPPORTED/, message
+ assert_equal 2, message.split("\n").size
+ true
+ end
+
+ __report_unsupported_parameters(arguments, unsupported_params)
+ end
+
+ should "not print unused unsupported parameters" do
+ arguments = { :moo => 'bam', :baz => 'qux' }
+ unsupported_params = [:foo]
+
+ STDERR.expects(:puts).never
+
+ __report_unsupported_parameters(arguments, unsupported_params)
+ end
+ end
+
+ context "__report_unsupported_method" do
+ should "print the warning" do
+ STDERR.expects(:puts).with do |message|
+ assert_match /foo/, message
+ true
+ end
+
+ __report_unsupported_method(:foo)
+ end
+ end
end
end
end
diff --git a/elasticsearch-dsl/README.md b/elasticsearch-dsl/README.md
index 56fa526..905bf4f 100644
--- a/elasticsearch-dsl/README.md
+++ b/elasticsearch-dsl/README.md
@@ -108,7 +108,13 @@ All Elasticsearch DSL features are supported, namely:
* [Pagination](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-from-size.html)
* [Options](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-body.html) (source filtering, highlighting, etc)
-An example of a complex search definition would look like this:
+An example of a complex search definition is below.
+
+**NOTE:** In order to run the example, you have to allow restoring from the `data.elasticsearch.org` repository by adding the following configuration line to your `elasticsearch.yml`:
+
+```yaml
+repositories.url.allowed_urls: ["https://s3.amazonaws.com/data.elasticsearch.org/*"]
+```
```ruby
require 'awesome_print'
@@ -120,18 +126,17 @@ include Elasticsearch::DSL
client = Elasticsearch::Client.new transport_options: { request: { timeout: 3600, open_timeout: 3600 } }
-# Restore an index from a snapshot
-#
+puts "Recovering the 'bicycles.stackexchange.com' index...".yellow
+
client.indices.delete index: 'bicycles.stackexchange.com', ignore: 404
-puts "Recovering the 'bicycles.stackexchange.com' index...".gray
client.snapshot.create_repository repository: 'data.elasticsearch.org', body: { type: 'url', settings: { url: 'https://s3.amazonaws.com/data.elasticsearch.org/bicycles.stackexchange.com/' } }
client.snapshot.restore repository: 'data.elasticsearch.org', snapshot: 'bicycles.stackexchange.com', body: { indices: 'bicycles.stackexchange.com' }
until client.cluster.health(level: 'indices')['indices']['bicycles.stackexchange.com']['status'] == 'green'
- r = client.indices.status(index: 'bicycles.stackexchange.com', human: true, recovery: true)['indices']['bicycles.stackexchange.com']['shards']['0'][0]
- print "\r#{r['index']['size']} of #{r['gateway_recovery']['index']['expected_recovered_size']}".ljust(52).gray
+ r = client.indices.recovery(index: 'bicycles.stackexchange.com', human: true)['bicycles.stackexchange.com']['shards'][0] rescue nil
+ print "\r#{r['index']['size']['recovered'] rescue '0b'} of #{r['index']['size']['total'] rescue 'N/A'}".ljust(52).gray
sleep 1
-end
+end; puts
# The search definition
#
@@ -164,9 +169,9 @@ definition = search {
end
end
- # Multiply the default `_score` by a (slightly normalized) document rating
+ # Multiply the default `_score` by the document rating
#
- functions << { script_score: { script: '_score * log10( doc["rating"].value )' } }
+ functions << { script_score: { script: '_score * doc["rating"].value' } }
end
end
@@ -175,6 +180,12 @@ definition = search {
aggregation :tags do
terms do
field 'tags'
+
+ # Calculate average view count per tag (inner aggregation)
+ #
+ aggregation :avg_view_count do
+ avg field: 'view_count'
+ end
end
end
@@ -185,6 +196,12 @@ definition = search {
field 'creation_date'
interval 'month'
format 'yyyy-MM'
+
+ # Calculate the statistics on comment count per day (inner aggregation)
+ #
+ aggregation :comments do
+ stats field: 'comment_count'
+ end
end
end
@@ -206,14 +223,14 @@ definition = search {
source ['title', 'tags', 'creation_date', 'rating', 'user.location', 'user.display_name']
}
-puts "Search definition #{'-'*63}\n".gray
+puts "Search definition #{'-'*63}\n".yellow
ap definition.to_hash
# Execute the search request
#
response = client.search index: 'bicycles.stackexchange.com', type: ['question','answer'], body: definition
-puts "\nSearch results #{'-'*66}\n".gray
+puts "\nSearch results #{'-'*66}\n".yellow
ap response
```
diff --git a/elasticsearch-dsl/elasticsearch-dsl.gemspec b/elasticsearch-dsl/elasticsearch-dsl.gemspec
index ae5c20c..af13a06 100644
--- a/elasticsearch-dsl/elasticsearch-dsl.gemspec
+++ b/elasticsearch-dsl/elasticsearch-dsl.gemspec
@@ -22,7 +22,7 @@ Gem::Specification.new do |s|
s.rdoc_options = [ "--charset=UTF-8" ]
s.add_development_dependency "bundler", "~> 1.3"
- s.add_development_dependency "rake"
+ s.add_development_dependency "rake", "~> 11.1"
s.add_development_dependency "elasticsearch"
s.add_development_dependency "elasticsearch-extensions"
@@ -37,4 +37,8 @@ Gem::Specification.new do |s|
s.add_development_dependency 'yard'
s.add_development_dependency 'cane'
s.add_development_dependency 'pry'
+
+ if defined?(RUBY_VERSION) && RUBY_VERSION > '2.2'
+ s.add_development_dependency "test-unit", '~> 2'
+ end
end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search.rb
index c13b90e..2cd75cc 100644
--- a/elasticsearch-dsl/lib/elasticsearch/dsl/search.rb
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search.rb
@@ -38,8 +38,8 @@ module Elasticsearch
attr_reader :aggregations
def initialize(*args, &block)
- @options = Options.new
- instance_eval(&block) if block
+ @options = Options.new *args
+ block.arity < 1 ? self.instance_eval(&block) : block.call(self) if block
end
# DSL method for building or accessing the `query` part of a search definition
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregation.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregation.rb
index 2a362b7..701c5c4 100644
--- a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregation.rb
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregation.rb
@@ -28,7 +28,7 @@ module Elasticsearch
end
end
- # Defines a nested aggregation
+ # Defines an aggregation nested in another one
#
def aggregation(*args, &block)
call
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/global.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/global.rb
index 6b4c42c..cc1c0f3 100644
--- a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/global.rb
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/global.rb
@@ -9,14 +9,18 @@ module Elasticsearch
#
# search do
# aggregation :all_documents do
- # global
+ # global do
+ # aggregation :avg_clicks do
+ # avg field: 'clicks'
+ # end
+ # end
# end
# end
#
- # @see http://elasticsearch.org/guide/en/elasticsearch/reference/current/search-aggregations-bucket-global-aggregation.html
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-global-aggregation.html
#
class Global
- include BaseComponent
+ include BaseAggregationComponent
end
end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/missing.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/missing.rb
new file mode 100644
index 0000000..1b031b7
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/missing.rb
@@ -0,0 +1,36 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # A single bucket aggregation that creates a bucket of all documents
+ # which are missing a value for the field
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :articles_without_tags do
+ # missing field: 'tags'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # search do
+ # aggregation :articles_without_tags do
+ # missing do
+ # field 'tags'
+ # end
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/master/search-aggregations-bucket-missing-aggregation.html
+ #
+ class Missing
+ include BaseAggregationComponent
+
+ option_method :field
+ end
+
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/avg_bucket.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/avg_bucket.rb
new file mode 100644
index 0000000..f8dbcbe
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/avg_bucket.rb
@@ -0,0 +1,34 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # A sibling pipeline aggregation which calculates the (mean) average value of a specified metric in a sibling aggregation.
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :avg_monthly_sales do
+ # avg_bucket buckets_path: 'sales_per_month>sales'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # aggregation :avg_monthly_sales do
+ # avg_bucket do
+ # buckets_path 'sales_per_month>sales'
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-avg-bucket-aggregation.html
+ #
+ class AvgBucket
+ include BaseAggregationComponent
+
+ option_method :buckets_path
+ option_method :gap_policy
+ option_method :format
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/bucket_script.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/bucket_script.rb
new file mode 100644
index 0000000..3a55a57
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/bucket_script.rb
@@ -0,0 +1,36 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # A parent pipeline aggregation which executes a script which can perform per bucket computations on specified metrics in the parent multi-bucket aggregation.
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :t-shirt-percentage do
+ # bucket_script buckets_path: { tShirtSales: 't-shirts>sales', totalSales: 'total_sales' }, script: 'tShirtSales / totalSales * 100'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # aggregation :t-shirt-percentage do
+ # bucket_script do
+ # buckets_path tShirtSales: 't-shirts>sales', totalSales: 'total_sales'
+ # script 'tShirtSales / totalSales * 100'
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-bucket-script-aggregation.html
+ #
+ class BucketScript
+ include BaseAggregationComponent
+
+ option_method :buckets_path
+ option_method :script
+ option_method :gap_policy
+ option_method :format
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/bucket_selector.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/bucket_selector.rb
new file mode 100644
index 0000000..01794a2
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/bucket_selector.rb
@@ -0,0 +1,35 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # A parent pipeline aggregation which executes a script which determines whether the current bucket will be retained in the parent multi-bucket aggregation.
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :sales_bucket_filter do
+ # bucket_selector buckets_path: { totalSales: 'total_sales' }, script: 'totalSales <= 50'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # aggregation :sales_bucket_filter do
+ # bucket_selector do
+ # buckets_path totalSales: 'total_sales'
+ # script 'totalSales <= 50'
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-bucket-selector-aggregation.html
+ #
+ class BucketSelector
+ include BaseAggregationComponent
+
+ option_method :buckets_path
+ option_method :script
+ option_method :gap_policy
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/cumulative_sum.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/cumulative_sum.rb
new file mode 100644
index 0000000..4cda716
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/cumulative_sum.rb
@@ -0,0 +1,33 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # A parent pipeline aggregation which calculates the cumulative sum of a specified metric in a parent histogram (or date_histogram) aggregation.
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :cumulative_sales do
+ # cumulative_sum buckets_path: 'sales'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # aggregation :cumulative_sales do
+ # cumulative_sum do
+ # buckets_path 'sales'
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-cumulative-sum-aggregation.html
+ #
+ class CumulativeSum
+ include BaseAggregationComponent
+
+ option_method :buckets_path
+ option_method :format
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/derivative.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/derivative.rb
new file mode 100644
index 0000000..416d69d
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/derivative.rb
@@ -0,0 +1,34 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # A parent pipeline aggregation which calculates the derivative of a specified metric in a parent histogram (or date_histogram) aggregation.
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :sales_deriv do
+ # derivative buckets_path: 'sales'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # aggregation :sales_deriv do
+ # derivative do
+ # buckets_path 'sales'
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-derivative-aggregation.html
+ #
+ class Derivative
+ include BaseAggregationComponent
+
+ option_method :buckets_path
+ option_method :gap_policy
+ option_method :format
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/extended_stats_bucket.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/extended_stats_bucket.rb
new file mode 100644
index 0000000..c880ec4
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/extended_stats_bucket.rb
@@ -0,0 +1,34 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # A sibling pipeline aggregation which calculates a variety of stats across all bucket of a specified metric in a sibling aggregation. The specified metric must be numeric and the sibling aggregation must be a multi-bucket aggregation.
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :stats_monthly_sales do
+ # extended_stats_bucket buckets_path: 'sales_per_month>sales'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # aggregation :stats_monthly_sales do
+ # extended_stats_bucket do
+ # buckets_path 'sales_per_month>sales'
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-extended-stats-bucket-aggregation.html
+ #
+ class ExtendedStatsBucket
+ include BaseAggregationComponent
+
+ option_method :buckets_path
+ option_method :gap_policy
+ option_method :format
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/max_bucket.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/max_bucket.rb
new file mode 100644
index 0000000..a18b970
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/max_bucket.rb
@@ -0,0 +1,34 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # A sibling pipeline aggregation which identifies the bucket(s) with the maximum value of a specified metric in a sibling aggregation and outputs both the value and the key(s) of the bucket(s).
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :max_monthly_sales do
+ # max_bucket buckets_path: 'sales_per_month>sales'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # aggregation :max_monthly_sales do
+ # max_bucket do
+ # buckets_path 'sales_per_month>sales'
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-max-bucket-aggregation.html
+ #
+ class MaxBucket
+ include BaseAggregationComponent
+
+ option_method :buckets_path
+ option_method :gap_policy
+ option_method :format
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/min_bucket.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/min_bucket.rb
new file mode 100644
index 0000000..8213156
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/min_bucket.rb
@@ -0,0 +1,34 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # A sibling pipeline aggregation which identifies the bucket(s) with the minimum value of a specified metric in a sibling aggregation and outputs both the value and the key(s) of the bucket(s).
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :min_monthly_sales do
+ # min_bucket buckets_path: 'sales_per_month>sales'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # aggregation :min_monthly_sales do
+ # min_bucket do
+ # buckets_path 'sales_per_month>sales'
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-min-bucket-aggregation.html
+ #
+ class MinBucket
+ include BaseAggregationComponent
+
+ option_method :buckets_path
+ option_method :gap_policy
+ option_method :format
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/moving_avg.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/moving_avg.rb
new file mode 100644
index 0000000..e2134de
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/moving_avg.rb
@@ -0,0 +1,42 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # Given an ordered series of data, the Moving Average aggregation will slide a window across the data and emit the average value of that window.
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :the_movavg do
+ # moving_avg buckets_path: 'the_sum'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # aggregation :the_movavg do
+ # moving_avg do
+ # buckets_path 'the_sum'
+ # model 'holt'
+ # window 5
+ # gap_policy 'insert_zero'
+ # settings({ alpha: 0.5 })
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-movavg-aggregation.html
+ #
+ class MovingAvg
+ include BaseAggregationComponent
+
+ option_method :buckets_path
+ option_method :model
+ option_method :gap_policy
+ option_method :window
+ option_method :format
+ option_method :minimize
+ option_method :settings
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/percentiles_bucket.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/percentiles_bucket.rb
new file mode 100644
index 0000000..24620b2
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/percentiles_bucket.rb
@@ -0,0 +1,36 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # A sibling pipeline aggregation which calculates percentiles across all bucket of a specified metric in a sibling aggregation.
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :sum_monthly_sales do
+ # percentiles_bucket buckets_path: 'sales_per_month>sales'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # aggregation :sum_monthly_sales do
+ # percentiles_bucket do
+ # buckets_path 'sales_per_month>sales'
+ # percents [25.0 50.0 75.0]
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-percentiles-bucket-aggregation.html
+ #
+ class PercentilesBucket
+ include BaseAggregationComponent
+
+ option_method :buckets_path
+ option_method :gap_policy
+ option_method :format
+ option_method :percents
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/serial_diff.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/serial_diff.rb
new file mode 100644
index 0000000..2867227
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/serial_diff.rb
@@ -0,0 +1,36 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # Serial differencing is a technique where values in a time series are subtracted from itself at different time lags or periods.
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :thirtieth_difference do
+ # serial_diff buckets_path: 'the_sum'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # aggregation :thirtieth_difference do
+ # serial_diff do
+ # buckets_path 'the_sum'
+ # lag 30
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-serialdiff-aggregation.html
+ #
+ class SerialDiff
+ include BaseAggregationComponent
+
+ option_method :buckets_path
+ option_method :lag
+ option_method :gap_policy
+ option_method :format
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/stats_bucket.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/stats_bucket.rb
new file mode 100644
index 0000000..1b50593
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/stats_bucket.rb
@@ -0,0 +1,34 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # A sibling pipeline aggregation which calculates a variety of stats across all bucket of a specified metric in a sibling aggregation.
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :stats_monthly_sales do
+ # stats_bucket buckets_path: 'sales_per_month>sales'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # aggregation :stats_monthly_sales do
+ # stats_bucket do
+ # buckets_path 'sales_per_month>sales'
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-stats-bucket-aggregation.html
+ #
+ class StatsBucket
+ include BaseAggregationComponent
+
+ option_method :buckets_path
+ option_method :gap_policy
+ option_method :format
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/sum_bucket.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/sum_bucket.rb
new file mode 100644
index 0000000..11dfd1c
--- /dev/null
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/pipeline/sum_bucket.rb
@@ -0,0 +1,34 @@
+module Elasticsearch
+ module DSL
+ module Search
+ module Aggregations
+
+ # A sibling pipeline aggregation which calculates the sum across all bucket of a specified metric in a sibling aggregation.
+ #
+ # @example Passing the options as a Hash
+ #
+ # aggregation :sum_monthly_sales do
+ # sum_bucket buckets_path: 'sales_per_month>sales'
+ # end
+ #
+ # @example Passing the options as a block
+ #
+ # aggregation :sum_monthly_sales do
+ # sum_bucket do
+ # buckets_path 'sales_per_month>sales'
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-sum-bucket-aggregation.html
+ #
+ class SumBucket
+ include BaseAggregationComponent
+
+ option_method :buckets_path
+ option_method :gap_policy
+ option_method :format
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/stats.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/stats.rb
index f1b9df9..2b5839c 100644
--- a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/stats.rb
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/stats.rb
@@ -5,7 +5,7 @@ module Elasticsearch
# A multi-value metrics aggregation which returns statistical information on numeric values
#
- # @example
+ # @example Passing the options as a Hash
#
# search do
# aggregation :clicks_stats do
@@ -13,10 +13,23 @@ module Elasticsearch
# end
# end
#
- # @see http://elasticsearch.org/guide/en/elasticsearch/reference/current/search-aggregations-metrics-stats-aggregation.html
+ # @example Passing the options as a block
+ #
+ # search do
+ # aggregation :clicks_stats do
+ # stats do
+ # field 'clicks'
+ # end
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-stats-aggregation.html
#
class Stats
include BaseComponent
+
+ option_method :field
+ option_method :script
end
end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/terms.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/terms.rb
index 369f135..2494f52 100644
--- a/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/terms.rb
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/aggregations/terms.rb
@@ -5,13 +5,23 @@ module Elasticsearch
# A multi-bucket aggregation which returns the collection of terms and their document counts
#
- # @example
+ # @example Passing the options as a Hash
#
# aggregation :tags do
# terms field: 'tags'
# end
#
- # @see http://elasticsearch.org/guide/en/elasticsearch/reference/current/search-aggregations-bucket-terms-aggregation.html
+ # @example Passing the options as a block
+ #
+ # search do
+ # aggregation :tags do
+ # terms do
+ # field 'tags'
+ # end
+ # end
+ # end
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-terms-aggregation.html
#
class Terms
include BaseAggregationComponent
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/filters/range.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/filters/range.rb
index 8256b0e..018d37f 100644
--- a/elasticsearch-dsl/lib/elasticsearch/dsl/search/filters/range.rb
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/filters/range.rb
@@ -31,6 +31,7 @@ module Elasticsearch
option_method :lt
option_method :boost
option_method :time_zone
+ option_method :format
end
end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/queries/bool.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/queries/bool.rb
index 5b00fcc..63bbec2 100644
--- a/elasticsearch-dsl/lib/elasticsearch/dsl/search/queries/bool.rb
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/queries/bool.rb
@@ -57,6 +57,11 @@ module Elasticsearch
self
end
+ def filter(*args, &block)
+ @filter = block ? Filter.new(*args, &block) : args.first
+ self
+ end
+
def to_hash
@hash[name].update(@args.to_hash) if @args.respond_to?(:to_hash)
@@ -66,6 +71,11 @@ module Elasticsearch
@hash[name] = @args unless @args.nil? || @args.empty?
end
+ if @filter
+ _filter = @filter.respond_to?(:to_hash) ? @filter.to_hash : @filter
+ @hash[name].update(filter: _filter)
+ end
+
@hash
end
end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/queries/match.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/queries/match.rb
index d829922..07b7fcc 100644
--- a/elasticsearch-dsl/lib/elasticsearch/dsl/search/queries/match.rb
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/queries/match.rb
@@ -26,6 +26,7 @@ module Elasticsearch
option_method :operator
option_method :type
option_method :boost
+ option_method :fuzziness
end
end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/queries/range.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/queries/range.rb
index 259d33d..3e07192 100644
--- a/elasticsearch-dsl/lib/elasticsearch/dsl/search/queries/range.rb
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/queries/range.rb
@@ -39,6 +39,7 @@ module Elasticsearch
option_method :lt
option_method :boost
option_method :time_zone
+ option_method :format
end
end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/search/sort.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/search/sort.rb
index 0a45697..687d0bf 100644
--- a/elasticsearch-dsl/lib/elasticsearch/dsl/search/sort.rb
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/search/sort.rb
@@ -36,14 +36,23 @@ module Elasticsearch
#
def to_hash
if @block
- call
+ call unless @block_called
+ @block_called = true
else
- @value << @args if @args
+ @value << @args if @args && !@args.empty? && ! @value.include?(@args)
end
@hash = @value.flatten
@hash
end
+
+ # Return whether the definition is empty
+ #
+ # @return [Boolean]
+ #
+ def empty?
+ to_hash.empty?
+ end
end
end
end
diff --git a/elasticsearch-dsl/lib/elasticsearch/dsl/version.rb b/elasticsearch-dsl/lib/elasticsearch/dsl/version.rb
index 5474bde..a09db75 100644
--- a/elasticsearch-dsl/lib/elasticsearch/dsl/version.rb
+++ b/elasticsearch-dsl/lib/elasticsearch/dsl/version.rb
@@ -1,5 +1,5 @@
module Elasticsearch
module DSL
- VERSION = "0.1.0"
+ VERSION = "0.1.4"
end
end
diff --git a/elasticsearch-dsl/test/integration/search_aggregation_geo_test.rb b/elasticsearch-dsl/test/integration/search_aggregation_geo_test.rb
index 4a3ce51..672a38c 100644
--- a/elasticsearch-dsl/test/integration/search_aggregation_geo_test.rb
+++ b/elasticsearch-dsl/test/integration/search_aggregation_geo_test.rb
@@ -15,7 +15,7 @@ module Elasticsearch
mappings: {
venue: {
properties: {
- location: { type: 'geo_point', normalize: false, validate: true }
+ location: { type: 'geo_point' }
}
}
}
diff --git a/elasticsearch-dsl/test/integration/search_aggregations_test.rb b/elasticsearch-dsl/test/integration/search_aggregations_test.rb
index f5e0e24..6b09de7 100644
--- a/elasticsearch-dsl/test/integration/search_aggregations_test.rb
+++ b/elasticsearch-dsl/test/integration/search_aggregations_test.rb
@@ -36,7 +36,7 @@ module Elasticsearch
assert_equal 3, response['aggregations']['tags']['value']
end
- should "return tag counts per clicks range" do
+ should "return tag counts per clicks range as an inner (nested) aggregation" do
response = @client.search index: 'test', body: search {
aggregation :clicks do
range field: 'clicks' do
@@ -95,6 +95,29 @@ module Elasticsearch
assert_equal 13, response['aggregations']['avg_clicks']['value'].to_i
end
+ should "define a global aggregation" do
+ response = @client.search index: 'test', body: search {
+ query do
+ filtered filter: { terms: { tags: ['two'] } }
+ end
+
+ aggregation :avg_clicks do
+ avg field: 'clicks'
+ end
+
+ aggregation :all_documents do
+ global do
+ aggregation :avg_clicks do
+ avg field: 'clicks'
+ end
+ end
+ end
+ }.to_hash
+
+ assert_equal 15, response['aggregations']['avg_clicks']['value'].to_i
+ assert_equal 13, response['aggregations']['all_documents']['avg_clicks']['value'].to_i
+ end
+
should "return statistics on clicks" do
response = @client.search index: 'test', body: search {
aggregation :stats_clicks do
diff --git a/elasticsearch-dsl/test/integration/search_test.rb b/elasticsearch-dsl/test/integration/search_test.rb
new file mode 100644
index 0000000..44a3393
--- /dev/null
+++ b/elasticsearch-dsl/test/integration/search_test.rb
@@ -0,0 +1,60 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ class SearchIntegrationTest < ::Elasticsearch::Test::IntegrationTestCase
+ include Elasticsearch::DSL::Search
+
+ class MySearch
+ include Elasticsearch::DSL::Search
+
+ def initialize(q)
+ @q = q
+ end
+
+ def tags
+ %w[ one two ]
+ end
+
+ def search_definition
+ search do |q|
+ q.query do |q|
+ q.bool do |q|
+ q.must do |q|
+ q.match title: @q
+ end
+ q.must do |q|
+ q.terms tags: tags
+ end
+ end
+ end
+ end
+ end
+ end
+
+ context "The Search class" do
+ startup do
+ Elasticsearch::Extensions::Test::Cluster.start(nodes: 1) if ENV['SERVER'] and not Elasticsearch::Extensions::Test::Cluster.running?
+ end
+
+ setup do
+ @client.indices.create index: 'test'
+ @client.index index: 'test', type: 'd', id: '1', body: { title: 'Test', tags: ['one'] }
+ @client.index index: 'test', type: 'd', id: '2', body: { title: 'Test', tags: ['one', 'two'] }
+ @client.index index: 'test', type: 'd', id: '3', body: { title: 'Test', tags: ['three'] }
+ @client.indices.refresh index: 'test'
+ end
+
+
+ should "have access to the calling context" do
+ s = MySearch.new('test')
+ response = @client.search index: 'test', body: s.search_definition.to_hash
+
+ assert_equal 2, response['hits']['total']
+ assert_equal 'Test', response['hits']['hits'][0]['_source']['title']
+ assert_same_elements ['1', '2'], response['hits']['hits'].map { |d| d['_id'] }
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/global_test.rb b/elasticsearch-dsl/test/unit/aggregations/global_test.rb
index 1525b44..20e1d64 100644
--- a/elasticsearch-dsl/test/unit/aggregations/global_test.rb
+++ b/elasticsearch-dsl/test/unit/aggregations/global_test.rb
@@ -12,7 +12,25 @@ module Elasticsearch
should "be converted to a Hash" do
assert_equal({ global: {} }, subject.to_hash)
end
-
+
+ should "take a block" do
+ subject = Global.new do
+ end
+ assert_equal({global: {} }, subject.to_hash)
+ end
+
+ should "define aggregations" do
+ subject = Global.new do
+ aggregation :foo do
+ terms field: "bar"
+ end
+ end
+ expected = {
+ aggregations: { foo: { terms: { field: "bar" } } },
+ global: {}
+ }
+ assert_equal(expected, subject.to_hash)
+ end
end
end
end
diff --git a/elasticsearch-dsl/test/unit/aggregations/missing_test.rb b/elasticsearch-dsl/test/unit/aggregations/missing_test.rb
new file mode 100644
index 0000000..84974ad
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/missing_test.rb
@@ -0,0 +1,39 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class MissingTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
+ context "Missing aggregation" do
+ subject { Missing.new }
+
+ should "be converted to a Hash" do
+ assert_equal({ missing: {} }, subject.to_hash)
+ end
+
+ should "take a Hash" do
+ subject = Missing.new( { field: 'foo' } )
+ assert_equal({ missing: { field: "foo" } }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject.field 'foo'
+
+ assert_equal %w[ field ], subject.to_hash[:missing].keys.map(&:to_s).sort
+ assert_equal 'foo', subject.to_hash[:missing][:field]
+ end
+
+ should "take a block" do
+ subject = Missing.new do
+ field 'bar'
+ end
+
+ assert_equal({missing: { field: 'bar' } }, subject.to_hash)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/pipeline/avg_bucket_test.rb b/elasticsearch-dsl/test/unit/aggregations/pipeline/avg_bucket_test.rb
new file mode 100644
index 0000000..840d152
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/pipeline/avg_bucket_test.rb
@@ -0,0 +1,39 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class AvgBucketTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
+ context "Avg Bucket agg" do
+ subject { AvgBucket.new }
+
+ should "be converted to a hash" do
+ assert_equal({ avg_bucket: {} }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject = AvgBucket.new :foo
+
+ subject.buckets_path 'bar'
+ subject.gap_policy 'bar'
+ subject.format 'bar'
+
+ assert_equal %w[ buckets_path format gap_policy ],
+ subject.to_hash[:avg_bucket][:foo].keys.map(&:to_s).sort
+ assert_equal 'bar', subject.to_hash[:avg_bucket][:foo][:buckets_path]
+ end
+
+ should "take a block" do
+ subject = AvgBucket.new :foo do
+ format 'bar'
+ end
+ assert_equal({avg_bucket: { foo: { format: 'bar' } } }, subject.to_hash)
+ end
+
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/pipeline/bucket_script_test.rb b/elasticsearch-dsl/test/unit/aggregations/pipeline/bucket_script_test.rb
new file mode 100644
index 0000000..a9ba3c1
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/pipeline/bucket_script_test.rb
@@ -0,0 +1,39 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class BucketScriptTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
+ context "Bucket Script agg" do
+ subject { BucketScript.new }
+
+ should "be converted to a hash" do
+ assert_equal({ bucket_script: {} }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject = BucketScript.new :foo
+
+ subject.buckets_path foo: 'foo', bar: 'bar'
+ subject.script 'bar'
+ subject.gap_policy 'skip'
+ subject.format 'bar'
+
+ assert_equal %w[ buckets_path format gap_policy script ],
+ subject.to_hash[:bucket_script][:foo].keys.map(&:to_s).sort
+ assert_equal 'bar', subject.to_hash[:bucket_script][:foo][:buckets_path][:bar]
+ end
+
+ should "take a block" do
+ subject = BucketScript.new :foo do
+ format 'bar'
+ end
+ assert_equal({bucket_script: { foo: { format: 'bar' } } }, subject.to_hash)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/pipeline/bucket_selector_test.rb b/elasticsearch-dsl/test/unit/aggregations/pipeline/bucket_selector_test.rb
new file mode 100644
index 0000000..394d10e
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/pipeline/bucket_selector_test.rb
@@ -0,0 +1,38 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class BucketSelectorTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
+ context "Bucket Selector agg" do
+ subject { BucketSelector.new }
+
+ should "be converted to a hash" do
+ assert_equal({ bucket_selector: {} }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject = BucketSelector.new :foo
+
+ subject.buckets_path foo: 'foo', bar: 'bar'
+ subject.script 'bar'
+ subject.gap_policy 'skip'
+
+ assert_equal %w[ buckets_path gap_policy script ],
+ subject.to_hash[:bucket_selector][:foo].keys.map(&:to_s).sort
+ assert_equal 'bar', subject.to_hash[:bucket_selector][:foo][:buckets_path][:bar]
+ end
+
+ should "take a block" do
+ subject = BucketSelector.new :foo do
+ gap_policy 'skip'
+ end
+ assert_equal({bucket_selector: { foo: { gap_policy: 'skip' } } }, subject.to_hash)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/pipeline/cumulative_sum_test.rb b/elasticsearch-dsl/test/unit/aggregations/pipeline/cumulative_sum_test.rb
new file mode 100644
index 0000000..57fccb2
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/pipeline/cumulative_sum_test.rb
@@ -0,0 +1,37 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class CumulativeSumTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
+ context "Cumulative Sum Bucket agg" do
+ subject { CumulativeSum.new }
+
+ should "be converted to a hash" do
+ assert_equal({ cumulative_sum: {} }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject = CumulativeSum.new :foo
+
+ subject.buckets_path 'bar'
+ subject.format 'bar'
+
+ assert_equal %w[ buckets_path format ],
+ subject.to_hash[:cumulative_sum][:foo].keys.map(&:to_s).sort
+ assert_equal 'bar', subject.to_hash[:cumulative_sum][:foo][:buckets_path]
+ end
+
+ should "take a block" do
+ subject = CumulativeSum.new :foo do
+ format 'bar'
+ end
+ assert_equal({cumulative_sum: { foo: { format: 'bar' } } }, subject.to_hash)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/pipeline/derivative_test.rb b/elasticsearch-dsl/test/unit/aggregations/pipeline/derivative_test.rb
new file mode 100644
index 0000000..1a2ea65
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/pipeline/derivative_test.rb
@@ -0,0 +1,39 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class DerivativeTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
+ context "Derivative agg" do
+ subject { Derivative.new }
+
+ should "be converted to a hash" do
+ assert_equal({ derivative: {} }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject = Derivative.new :foo
+
+ subject.buckets_path 'bar'
+ subject.gap_policy 'bar'
+ subject.format 'bar'
+
+ assert_equal %w[ buckets_path format gap_policy ],
+ subject.to_hash[:derivative][:foo].keys.map(&:to_s).sort
+ assert_equal 'bar', subject.to_hash[:derivative][:foo][:buckets_path]
+ end
+
+ should "take a block" do
+ subject = Derivative.new :foo do
+ format 'bar'
+ end
+ assert_equal({derivative: { foo: { format: 'bar' } } }, subject.to_hash)
+ end
+
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/pipeline/extended_stats_bucket_test.rb b/elasticsearch-dsl/test/unit/aggregations/pipeline/extended_stats_bucket_test.rb
new file mode 100644
index 0000000..e1856c5
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/pipeline/extended_stats_bucket_test.rb
@@ -0,0 +1,38 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class ExtendedStatsBucketTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
+ context "Extended Stats Bucket agg" do
+ subject { ExtendedStatsBucket.new }
+
+ should "be converted to a hash" do
+ assert_equal({ extended_stats_bucket: {} }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject = ExtendedStatsBucket.new :foo
+
+ subject.buckets_path 'bar'
+ subject.gap_policy 'skip'
+ subject.format 'bar'
+
+ assert_equal %w[ buckets_path format gap_policy ],
+ subject.to_hash[:extended_stats_bucket][:foo].keys.map(&:to_s).sort
+ assert_equal 'bar', subject.to_hash[:extended_stats_bucket][:foo][:buckets_path]
+ end
+
+ should "take a block" do
+ subject = ExtendedStatsBucket.new :foo do
+ format 'bar'
+ end
+ assert_equal({extended_stats_bucket: { foo: { format: 'bar' } } }, subject.to_hash)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/pipeline/max_bucket_test.rb b/elasticsearch-dsl/test/unit/aggregations/pipeline/max_bucket_test.rb
new file mode 100644
index 0000000..1b553bb
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/pipeline/max_bucket_test.rb
@@ -0,0 +1,38 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class MaxBucketTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
+ context "Max Bucket agg" do
+ subject { MaxBucket.new }
+
+ should "be converted to a hash" do
+ assert_equal({ max_bucket: {} }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject = MaxBucket.new :foo
+
+ subject.buckets_path 'bar'
+ subject.gap_policy 'bar'
+ subject.format 'bar'
+
+ assert_equal %w[ buckets_path format gap_policy ],
+ subject.to_hash[:max_bucket][:foo].keys.map(&:to_s).sort
+ assert_equal 'bar', subject.to_hash[:max_bucket][:foo][:buckets_path]
+ end
+
+ should "take a block" do
+ subject = MaxBucket.new :foo do
+ format 'bar'
+ end
+ assert_equal({max_bucket: { foo: { format: 'bar' } } }, subject.to_hash)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/pipeline/min_bucket_test.rb b/elasticsearch-dsl/test/unit/aggregations/pipeline/min_bucket_test.rb
new file mode 100644
index 0000000..dc9265c
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/pipeline/min_bucket_test.rb
@@ -0,0 +1,38 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class MinBucketTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
+ context "Min Bucket agg" do
+ subject { MinBucket.new }
+
+ should "be converted to a hash" do
+ assert_equal({ min_bucket: {} }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject = MinBucket.new :foo
+
+ subject.buckets_path 'bar'
+ subject.gap_policy 'bar'
+ subject.format 'bar'
+
+ assert_equal %w[ buckets_path format gap_policy ],
+ subject.to_hash[:min_bucket][:foo].keys.map(&:to_s).sort
+ assert_equal 'bar', subject.to_hash[:min_bucket][:foo][:buckets_path]
+ end
+
+ should "take a block" do
+ subject = MinBucket.new :foo do
+ format 'bar'
+ end
+ assert_equal({min_bucket: { foo: { format: 'bar' } } }, subject.to_hash)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/pipeline/moving_avg_test.rb b/elasticsearch-dsl/test/unit/aggregations/pipeline/moving_avg_test.rb
new file mode 100644
index 0000000..9c0e220
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/pipeline/moving_avg_test.rb
@@ -0,0 +1,41 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class MovingAvgTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
+ context "Moving Average Bucket agg" do
+ subject { MovingAvg.new }
+
+ should "be converted to a hash" do
+ assert_equal({ moving_avg: {} }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject = MovingAvg.new :foo
+
+ subject.buckets_path 'bar'
+ subject.gap_policy 'skip'
+ subject.minimize false
+ subject.model 'simple'
+ subject.settings({ period: 7 })
+ subject.window 5
+
+ assert_equal %w[ buckets_path gap_policy minimize model settings window ],
+ subject.to_hash[:moving_avg][:foo].keys.map(&:to_s).sort
+ assert_equal 'bar', subject.to_hash[:moving_avg][:foo][:buckets_path]
+ end
+
+ should "take a block" do
+ subject = MovingAvg.new :foo do
+ format 'bar'
+ end
+ assert_equal({moving_avg: { foo: { format: 'bar' } } }, subject.to_hash)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/pipeline/percentiles_bucket_test.rb b/elasticsearch-dsl/test/unit/aggregations/pipeline/percentiles_bucket_test.rb
new file mode 100644
index 0000000..0cfb614
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/pipeline/percentiles_bucket_test.rb
@@ -0,0 +1,39 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class PercentilesBucketTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
+ context "Percentiles Bucket agg" do
+ subject { PercentilesBucket.new }
+
+ should "be converted to a hash" do
+ assert_equal({ percentiles_bucket: {} }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject = PercentilesBucket.new :foo
+
+ subject.buckets_path 'bar'
+ subject.gap_policy 'skip'
+ subject.format 'bar'
+ subject.percents [ 1, 5, 25, 50, 75, 95, 99 ]
+
+ assert_equal %w[ buckets_path format gap_policy percents ],
+ subject.to_hash[:percentiles_bucket][:foo].keys.map(&:to_s).sort
+ assert_equal 'bar', subject.to_hash[:percentiles_bucket][:foo][:buckets_path]
+ end
+
+ should "take a block" do
+ subject = PercentilesBucket.new :foo do
+ format 'bar'
+ end
+ assert_equal({percentiles_bucket: { foo: { format: 'bar' } } }, subject.to_hash)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/pipeline/serial_diff_test.rb b/elasticsearch-dsl/test/unit/aggregations/pipeline/serial_diff_test.rb
new file mode 100644
index 0000000..7f4833b
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/pipeline/serial_diff_test.rb
@@ -0,0 +1,39 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class SerialDiffTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
 + context "Serial Differencing agg" do
+ subject { SerialDiff.new }
+
+ should "be converted to a hash" do
+ assert_equal({ serial_diff: {} }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject = SerialDiff.new :foo
+
+ subject.buckets_path 'bar'
+ subject.lag 1
+ subject.gap_policy 'skip'
+ subject.format 'foo'
+
+ assert_equal %w[ buckets_path format gap_policy lag ],
+ subject.to_hash[:serial_diff][:foo].keys.map(&:to_s).sort
+ assert_equal 'bar', subject.to_hash[:serial_diff][:foo][:buckets_path]
+ end
+
+ should "take a block" do
+ subject = SerialDiff.new :foo do
+ gap_policy 'skip'
+ end
+ assert_equal({serial_diff: { foo: { gap_policy: 'skip' } } }, subject.to_hash)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/pipeline/stats_bucket_test.rb b/elasticsearch-dsl/test/unit/aggregations/pipeline/stats_bucket_test.rb
new file mode 100644
index 0000000..f1a36b1
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/pipeline/stats_bucket_test.rb
@@ -0,0 +1,38 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class StatsBucketTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
+ context "Stats Bucket agg" do
+ subject { StatsBucket.new }
+
+ should "be converted to a hash" do
+ assert_equal({ stats_bucket: {} }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject = StatsBucket.new :foo
+
+ subject.buckets_path 'bar'
+ subject.gap_policy 'bar'
+ subject.format 'bar'
+
+ assert_equal %w[ buckets_path format gap_policy ],
+ subject.to_hash[:stats_bucket][:foo].keys.map(&:to_s).sort
+ assert_equal 'bar', subject.to_hash[:stats_bucket][:foo][:buckets_path]
+ end
+
+ should "take a block" do
+ subject = StatsBucket.new :foo do
+ format 'bar'
+ end
+ assert_equal({stats_bucket: { foo: { format: 'bar' } } }, subject.to_hash)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/pipeline/sum_bucket_test.rb b/elasticsearch-dsl/test/unit/aggregations/pipeline/sum_bucket_test.rb
new file mode 100644
index 0000000..477e89e
--- /dev/null
+++ b/elasticsearch-dsl/test/unit/aggregations/pipeline/sum_bucket_test.rb
@@ -0,0 +1,38 @@
+require 'test_helper'
+
+module Elasticsearch
+ module Test
+ module Aggregations
+ class SumBucketTest < ::Test::Unit::TestCase
+ include Elasticsearch::DSL::Search::Aggregations
+
+ context "Sum Bucket agg" do
+ subject { SumBucket.new }
+
+ should "be converted to a hash" do
+ assert_equal({ sum_bucket: {} }, subject.to_hash)
+ end
+
+ should "have option methods" do
+ subject = SumBucket.new :foo
+
+ subject.buckets_path 'bar'
+ subject.gap_policy 'bar'
+ subject.format 'bar'
+
+ assert_equal %w[ buckets_path format gap_policy ],
+ subject.to_hash[:sum_bucket][:foo].keys.map(&:to_s).sort
+ assert_equal 'bar', subject.to_hash[:sum_bucket][:foo][:buckets_path]
+ end
+
+ should "take a block" do
+ subject = SumBucket.new :foo do
+ format 'bar'
+ end
+ assert_equal({sum_bucket: { foo: { format: 'bar' } } }, subject.to_hash)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-dsl/test/unit/aggregations/stats_test.rb b/elasticsearch-dsl/test/unit/aggregations/stats_test.rb
index 3e8c153..0179ba7 100644
--- a/elasticsearch-dsl/test/unit/aggregations/stats_test.rb
+++ b/elasticsearch-dsl/test/unit/aggregations/stats_test.rb
@@ -17,6 +17,14 @@ module Elasticsearch
subject = Stats.new foo: 'bar'
assert_equal({ stats: { foo: 'bar' } }, subject.to_hash)
end
+
+ should "take a block" do
+ subject = Stats.new do
+ field 'bar'
+ end
+
+ assert_equal({stats: { field: 'bar' } }, subject.to_hash)
+ end
end
end
end
diff --git a/elasticsearch-dsl/test/unit/filters/range_test.rb b/elasticsearch-dsl/test/unit/filters/range_test.rb
index c72ea89..f426f91 100644
--- a/elasticsearch-dsl/test/unit/filters/range_test.rb
+++ b/elasticsearch-dsl/test/unit/filters/range_test.rb
@@ -19,8 +19,9 @@ module Elasticsearch
subject.gte 'bar'
subject.lte 'bar'
subject.time_zone 'bar'
+ subject.format 'bar'
- assert_equal %w[ gte lte time_zone ],
+ assert_equal %w[ format gte lte time_zone ],
subject.to_hash[:range][:foo].keys.map(&:to_s).sort
assert_equal 'bar', subject.to_hash[:range][:foo][:gte]
diff --git a/elasticsearch-dsl/test/unit/queries/bool_test.rb b/elasticsearch-dsl/test/unit/queries/bool_test.rb
index 94ce606..14d9bb5 100644
--- a/elasticsearch-dsl/test/unit/queries/bool_test.rb
+++ b/elasticsearch-dsl/test/unit/queries/bool_test.rb
@@ -92,6 +92,15 @@ module Elasticsearch
subject.to_hash )
end
+ should "allow adding a filter" do
+ subject = Bool.new
+ subject.filter do
+ term foo: 'bar'
+ end
+
+ assert_equal( { bool: { filter: { term: { foo: "bar" } } } }, subject.to_hash)
+ end
+
should "be chainable" do
subject = Bool.new
diff --git a/elasticsearch-dsl/test/unit/queries/match_test.rb b/elasticsearch-dsl/test/unit/queries/match_test.rb
index 1fdce8d..89b1eb4 100644
--- a/elasticsearch-dsl/test/unit/queries/match_test.rb
+++ b/elasticsearch-dsl/test/unit/queries/match_test.rb
@@ -39,13 +39,14 @@ module Elasticsearch
should "take a block" do
subject = Match.new :message do
- query 'test'
- operator 'and'
- type 'phrase_prefix'
- boost 2
+ query 'test'
+ operator 'and'
+ type 'phrase_prefix'
+ boost 2
+ fuzziness 'AUTO'
end
- assert_equal({match: {message: {query: "test", operator: "and", type: 'phrase_prefix', boost: 2}}},
+ assert_equal({match: {message: {query: "test", operator: "and", type: 'phrase_prefix', boost: 2, fuzziness: 'AUTO'}}},
subject.to_hash)
end
diff --git a/elasticsearch-dsl/test/unit/queries/range_test.rb b/elasticsearch-dsl/test/unit/queries/range_test.rb
index 1722bf5..70de6c6 100644
--- a/elasticsearch-dsl/test/unit/queries/range_test.rb
+++ b/elasticsearch-dsl/test/unit/queries/range_test.rb
@@ -19,17 +19,19 @@ module Elasticsearch
gte 10
lte 20
boost 2
+ format 'mm/dd/yyyy'
end
- assert_equal({:range=>{:age=>{:gte=>10, :lte=>20, :boost=>2}}}, @subject.to_hash)
+ assert_equal({:range=>{:age=>{:gte=>10, :lte=>20, :boost=>2, :format=>'mm/dd/yyyy'}}}, @subject.to_hash)
end
should "take a method call" do
@subject = Range.new :age
@subject.gte 10
@subject.lte 20
+ @subject.format 'mm/dd/yyyy'
- assert_equal({:range=>{:age=>{:gte=>10, :lte=>20}}}, @subject.to_hash)
+ assert_equal({:range=>{:age=>{:gte=>10, :lte=>20, :format=>'mm/dd/yyyy'}}}, @subject.to_hash)
end
end
diff --git a/elasticsearch-dsl/test/unit/search_sort_test.rb b/elasticsearch-dsl/test/unit/search_sort_test.rb
index 61b72e2..41fe3a4 100644
--- a/elasticsearch-dsl/test/unit/search_sort_test.rb
+++ b/elasticsearch-dsl/test/unit/search_sort_test.rb
@@ -34,6 +34,31 @@ module Elasticsearch
{ bar: { order: 'desc' } },
], subject.to_hash )
end
+
+ should "be empty" do
+ subject = Elasticsearch::DSL::Search::Sort.new
+ assert_equal subject.empty?, true
+ end
+
+ should "not be empty" do
+ subject = Elasticsearch::DSL::Search::Sort.new foo: { order: 'desc' }
+ assert_equal subject.empty?, false
+ end
+
+ context "#to_hash" do
+ should "not duplicate values when defined by arguments" do
+ subject = Elasticsearch::DSL::Search::Sort.new foo: { order: 'desc' }
+ assert_equal(subject.to_hash, subject.to_hash)
+ end
+
+ should "not duplicate values when defined by a block" do
+ subject = Elasticsearch::DSL::Search::Sort.new do
+ by :foo
+ end
+
+ assert_equal(subject.to_hash, subject.to_hash)
+ end
+ end
end
end
end
diff --git a/elasticsearch-dsl/test/unit/search_test.rb b/elasticsearch-dsl/test/unit/search_test.rb
index e1d3577..381e178 100644
--- a/elasticsearch-dsl/test/unit/search_test.rb
+++ b/elasticsearch-dsl/test/unit/search_test.rb
@@ -21,6 +21,40 @@ module Elasticsearch
assert_instance_of Elasticsearch::DSL::Search::Search, Elasticsearch::DSL::Search.search
end
+
+ should "have access to the calling context" do
+ class DummySearchReceiver
+ include Elasticsearch::DSL::Search
+
+ def initialize
+ @other_value = 'foo'
+ end
+
+ def value
+ 42
+ end
+
+ def search_definition
+ search do |q|
+ q.from value
+ q.size @other_value
+
+ q.filter do |q|
+ q._and do |q|
+ q.term thang: @other_value
+ q.term attributes: value
+ end
+ end
+ end
+ end
+ end
+
+ assert_equal({from: 42,
+ size: 'foo',
+ filter: { and: [ { term: { thang: 'foo' } },
+ { term: { attributes: 42 } }]}},
+ DummySearchReceiver.new.search_definition.to_hash)
+ end
end
context "The Search class" do
diff --git a/elasticsearch-extensions/README.md b/elasticsearch-extensions/README.md
index bc92c4a..b385159 100644
--- a/elasticsearch-extensions/README.md
+++ b/elasticsearch-extensions/README.md
@@ -23,6 +23,85 @@ or install it from a source code checkout:
## Extensions
+### Backup
+
+Backup Elasticsearch indices as flat JSON files on the disk via integration
+with the [_Backup_](http://backup.github.io/backup/v4/) gem.
+
+Use the Backup gem's DSL to configure the backup:
+
+ require 'elasticsearch/extensions/backup'
+
+ Model.new(:elasticsearch_backup, 'Elasticsearch') do
+
+ database Elasticsearch do |db|
+ db.url = 'http://localhost:9200'
+ db.indices = 'test'
+ end
+
+ store_with Local do |local|
+ local.path = '/tmp/backups'
+ end
+
+ compress_with Gzip
+ end
+
+Perform the backup with the Backup gem's command line utility:
+
+ $ backup perform -t elasticsearch_backup
+
+See more information in the [`Backup::Database::Elasticsearch`](lib/extensions/backup.rb)
+class documentation.
+
+### Reindex
+
+Copy documents from one index and cluster into another one, for example for purposes of changing
+the settings and mappings of the index.
+
+**NOTE:** Elasticsearch natively supports re-indexing since version 2.3. This extension is useful
+ when you need the feature on older versions.
+
+When the extension is loaded together with the
+[Ruby client for Elasticsearch](../elasticsearch/README.md),
+a `reindex` method is added to the client:
+
+ require 'elasticsearch'
+ require 'elasticsearch/extensions/reindex'
+
+ client = Elasticsearch::Client.new
+ target_client = Elasticsearch::Client.new url: 'http://localhost:9250', log: true
+
+ client.index index: 'test', type: 'd', body: { title: 'foo' }
+
+ client.reindex source: { index: 'test' },
+ target: { index: 'test', client: target_client },
+ transform: lambda { |doc| doc['_source']['title'].upcase! },
+ refresh: true
+ # => { errors: 0 }
+
+ target_client.search index: 'test'
+ # => ... hits ... "title"=>"FOO"
+
+The method takes similar arguments to the core API
+[`reindex`](http://www.rubydoc.info/gems/elasticsearch-api/Elasticsearch/API/Actions#reindex-instance_method)
+method.
+
+You can also use the `Reindex` class directly:
+
+ require 'elasticsearch'
+ require 'elasticsearch/extensions/reindex'
+
+ client = Elasticsearch::Client.new
+
+ reindex = Elasticsearch::Extensions::Reindex.new \
+ source: { index: 'test', client: client },
+ target: { index: 'test-copy' }
+
+ reindex.perform
+
+See more information in the [`Elasticsearch::Extensions::Reindex::Reindex`](lib/extensions/reindex.rb)
+class documentation.
+
### ANSI
Colorize and format selected Elasticsearch response parts in terminal:
@@ -90,6 +169,9 @@ You can control the cluster configuration with environment variables as well:
TEST_CLUSTER_NAME=my_testing_cluster \
ruby -r elasticsearch -e "require 'elasticsearch/extensions/test/cluster'; Elasticsearch::Extensions::Test::Cluster.start"
+To prevent deleting data and configurations when the cluster is started, for example in a development environment,
+use the `clear_cluster: false` option or the `TEST_CLUSTER_CLEAR=false` environment variable.
+
[Full documentation](http://rubydoc.info/gems/elasticsearch-extensions/Elasticsearch/Extensions/Test/Cluster)
### Test::StartupShutdown
diff --git a/elasticsearch-extensions/Rakefile b/elasticsearch-extensions/Rakefile
index 6927603..daec1ab 100644
--- a/elasticsearch-extensions/Rakefile
+++ b/elasticsearch-extensions/Rakefile
@@ -36,7 +36,7 @@ namespace :test do
Rake::TestTask.new(:all) do |test|
Rake::Task['test:ci_reporter'].invoke if ENV['CI']
test.libs << 'lib' << 'test'
- test.test_files = FileList["test/**/unit/**/*_test.rb", "test/**/unit/**/*_test.rb"]
+ test.test_files = FileList["test/**/unit/**/*_test.rb", "test/**/integration/**/*_test.rb"]
end
Rake::TestTask.new(:profile) do |test|
diff --git a/elasticsearch-extensions/elasticsearch-extensions.gemspec b/elasticsearch-extensions/elasticsearch-extensions.gemspec
index 8194eb8..d7f429f 100644
--- a/elasticsearch-extensions/elasticsearch-extensions.gemspec
+++ b/elasticsearch-extensions/elasticsearch-extensions.gemspec
@@ -30,7 +30,12 @@ Gem::Specification.new do |s|
end
s.add_development_dependency "bundler", "> 1"
- s.add_development_dependency "rake"
+
+ if defined?(RUBY_VERSION) && RUBY_VERSION > '1.9'
+ s.add_development_dependency "rake", "~> 11.1"
+ else
+ s.add_development_dependency "rake", "< 11.0"
+ end
s.add_development_dependency "awesome_print"
diff --git a/elasticsearch-extensions/lib/elasticsearch/extensions.rb b/elasticsearch-extensions/lib/elasticsearch/extensions.rb
index c03e845..39e44c5 100644
--- a/elasticsearch-extensions/lib/elasticsearch/extensions.rb
+++ b/elasticsearch-extensions/lib/elasticsearch/extensions.rb
@@ -1,3 +1,5 @@
+# encoding: utf-8
+
require 'elasticsearch'
require 'elasticsearch/extensions/version'
diff --git a/elasticsearch-extensions/lib/elasticsearch/extensions/backup.rb b/elasticsearch-extensions/lib/elasticsearch/extensions/backup.rb
index 01a0a53..bd1dbda 100644
--- a/elasticsearch-extensions/lib/elasticsearch/extensions/backup.rb
+++ b/elasticsearch-extensions/lib/elasticsearch/extensions/backup.rb
@@ -12,25 +12,25 @@ require 'patron'
module Backup
module Database
- # Integration with the Backup gem [https://github.com/meskyanichi/backup/]
+ # Integration with the Backup gem [http://backup.github.io/backup/v4/]
#
# This extension allows to backup Elasticsearch indices as flat JSON files on the disk.
#
- # Use the Backup gem's DSL to configure the backup:
+ # @example Use the Backup gem's DSL to configure the backup
#
# require 'elasticsearch/extensions/backup'
#
# Model.new(:elasticsearch_backup, 'Elasticsearch') do
#
# database Elasticsearch do |db|
- # # db.url = 'http://localhost:9200'
- # # db.indices = 'articles,people'
- # # db.size = 500
- # # db.scroll = '10m'
+ # db.url = 'http://localhost:9200'
+ # db.indices = 'articles,people'
+ # db.size = 500
+ # db.scroll = '10m'
# end
#
# store_with Local do |local|
- # local.path = '/usr/local/var/backups'
+ # local.path = '/tmp/backups'
# local.keep = 3
# end
#
@@ -41,8 +41,33 @@ module Backup
#
# $ backup perform -t elasticsearch_backup
#
+ # The Backup gem can store your backup files on S3, Dropbox and other
+ # cloud providers, send notifications about the operation, and so on;
+ # read more in the gem documentation.
#
- # A simple recover script could look like this:
+ # @example Use the integration as a standalone script (eg. in a Rake task)
+ #
+ # require 'backup'
+ # require 'elasticsearch/extensions/backup'
+ #
+ # Backup::Logger.configure do
+ # logfile.enabled = true
+ # logfile.log_path = '/tmp/backups/log'
+ # end; Backup::Logger.start!
+ #
+ # backup = Backup::Model.new(:elasticsearch, 'Backup Elasticsearch') do
+ # database Backup::Database::Elasticsearch do |db|
+ # db.indices = 'test'
+ # end
+ #
+ # store_with Backup::Storage::Local do |local|
+ # local.path = '/tmp/backups'
+ # end
+ # end
+ #
+ # backup.perform!
+ #
+ # @example A simple recover script for the backup created in the previous examples
#
# PATH = '/path/to/backup/'
#
@@ -66,7 +91,7 @@ module Backup
# client.bulk body: payload
# end
#
- # @see http://meskyanichi.github.io/backup/v4/
+ # @see http://backup.github.io/backup/v4/
#
class Elasticsearch < Base
class Error < ::Backup::Error; end
diff --git a/elasticsearch-extensions/lib/elasticsearch/extensions/reindex.rb b/elasticsearch-extensions/lib/elasticsearch/extensions/reindex.rb
new file mode 100644
index 0000000..fdfbe1d
--- /dev/null
+++ b/elasticsearch-extensions/lib/elasticsearch/extensions/reindex.rb
@@ -0,0 +1,160 @@
+# encoding: utf-8
+
+module Elasticsearch
+ module Extensions
+
+ # This module allows copying documents from one index/cluster to another one
+ #
+ # When required together with the client, it will add the `reindex` method
+ #
+ # @see Reindex::Reindex.initialize
+ # @see Reindex::Reindex#perform
+ #
+ # @see http://www.rubydoc.info/gems/elasticsearch-api/Elasticsearch/API/Actions#reindex-instance_method
+ #
+ module Reindex
+
+ # Initialize a new instance of the Reindex class (shortcut)
+ #
+ # @see Reindex::Reindex.initialize
+ #
+ def new(arguments={})
+ Reindex.new(arguments)
+ end; extend self
+
+ module API
+ # Copy documents from one index into another and refresh the target index
+ #
+ # @example
+ # client.reindex source: { index: 'test1' }, target: { index: 'test2' }, refresh: true
+ #
+ # The method allows all the options as {Reindex::Reindex.new}.
+ #
+ # This method will be mixed into the Elasticsearch client's API, if available.
+ #
+ def reindex(arguments={})
+ arguments[:source] ||= {}
+ arguments[:source][:client] = self
+ Reindex.new(arguments).perform
+ end
+ end
+
+ # Include the `reindex` method in the API and client, if available
+ Elasticsearch::API::Actions.__send__ :include, API if defined?(Elasticsearch::API::Actions)
+ Elasticsearch::Transport::Client.__send__ :include, API if defined?(Elasticsearch::Transport::Client) && defined?(Elasticsearch::API)
+
+ # Copy documents from one index into another
+ #
+ # @example Copy documents to another index
+ #
+ # client = Elasticsearch::Client.new
+ # reindex = Elasticsearch::Extensions::Reindex.new \
+ # source: { index: 'test1', client: client },
+ # target: { index: 'test2' }
+ #
+ # reindex.perform
+ #
+ # @example Copy documents to a different cluster
+ #
+ # source_client = Elasticsearch::Client.new url: 'http://localhost:9200'
+ # target_client = Elasticsearch::Client.new url: 'http://localhost:9250'
+ #
+ # reindex = Elasticsearch::Extensions::Reindex.new \
+ # source: { index: 'test', client: source_client },
+ # target: { index: 'test', client: target_client }
+ # reindex.perform
+ #
+ # @example Transform the documents during re-indexing
+ #
+ # reindex = Elasticsearch::Extensions::Reindex.new \
+ # source: { index: 'test1', client: client },
+ # target: { index: 'test2', transform: lambda { |doc| doc['_source']['category'].upcase! } }
+ #
+ #
+ # The reindexing process works by "scrolling" an index and sending
+ # batches via the "Bulk" API to the target index/cluster
+ #
+ # @option arguments [String] :source The source index/cluster definition (*Required*)
+ # @option arguments [String] :target The target index/cluster definition (*Required*)
+ # @option arguments [Proc] :transform A block which will be executed for each document
+ # @option arguments [Integer] :batch_size The size of the batch for scroll operation (Default: 1000)
+ # @option arguments [String] :scroll The timeout for the scroll operation (Default: 5min)
+ # @option arguments [Boolean] :refresh Whether to refresh the target index after
+ # the operation is completed (Default: false)
+ #
+ # Be aware, that if you want to change the target index settings and/or mappings,
+ # you have to do so in advance by using the "Indices Create" API.
+ #
+      # Note, that there is a native "Reindex" API in Elasticsearch 2.3.x and higher versions,
+ # which will be more performant than the Ruby version.
+ #
+ # @see http://www.rubydoc.info/gems/elasticsearch-api/Elasticsearch/API/Actions#reindex-instance_method
+ #
+ class Reindex
+ attr_reader :arguments
+
+ def initialize(arguments={})
+ [
+ [:source, :index],
+ [:source, :client],
+ [:target, :index]
+ ].each do |required_option|
+ value = required_option.reduce(arguments) { |sum, o| sum = sum[o] ? sum[o] : {} }
+
+ raise ArgumentError,
+ "Required argument '#{Hash[*required_option]}' missing" if \
+ value.respond_to?(:empty?) ? value.empty? : value.nil?
+ end
+
+ @arguments = {
+ batch_size: 1000,
+ scroll: '5m',
+ refresh: false
+ }.merge(arguments)
+
+ arguments[:target][:client] ||= arguments[:source][:client]
+ end
+
+ # Performs the operation
+ #
+ # @return [Hash] A Hash with the information about the operation outcome
+ #
+ def perform
+ output = { errors: 0 }
+
+ response = arguments[:source][:client].search(
+ index: arguments[:source][:index],
+ scroll: arguments[:scroll],
+ size: arguments[:batch_size],
+ search_type: 'scan',
+ fields: ['_source', '_parent', '_routing', '_timestamp']
+ )
+
+ while response = arguments[:source][:client].scroll(scroll_id: response['_scroll_id'], scroll: arguments[:scroll]) do
+ documents = response['hits']['hits']
+ break if documents.empty?
+
+ bulk = documents.map do |doc|
+ doc['_index'] = arguments[:target][:index]
+
+ arguments[:transform].call(doc) if arguments[:transform]
+
+ doc['data'] = doc['_source']
+ doc.delete('_score')
+ doc.delete('_source')
+
+ { index: doc }
+ end
+
+ bulk_response = arguments[:target][:client].bulk body: bulk
+ output[:errors] += bulk_response['items'].select { |k, v| k.values.first['error'] }.size
+ end
+
+ arguments[:target][:client].indices.refresh index: arguments[:target][:index] if arguments[:refresh]
+
+ output
+ end
+ end
+ end
+ end
+end
diff --git a/elasticsearch-extensions/lib/elasticsearch/extensions/test/cluster.rb b/elasticsearch-extensions/lib/elasticsearch/extensions/test/cluster.rb
index 0efd82e..1d8ef9b 100644
--- a/elasticsearch-extensions/lib/elasticsearch/extensions/test/cluster.rb
+++ b/elasticsearch-extensions/lib/elasticsearch/extensions/test/cluster.rb
@@ -21,293 +21,567 @@ module Elasticsearch
module Extensions
module Test
- # A convenience Ruby class for starting and stopping a separate testing in-memory cluster,
- # to not depend on -- and not mess up -- <localhost:9200>.
+ # A convenience Ruby class for starting and stopping an Elasticsearch cluster,
+ # eg. for integration tests
#
# @example Start a cluster with default configuration
# require 'elasticsearch/extensions/test/cluster'
- # Elasticsearch::Extensions::Test::Cluster.start
+ # Elasticsearch::Extensions::Test::Cluster::Cluster.new.start
#
- # @see Cluster#start Cluster.start
- # @see Cluster#stop Cluster.stop
+ # @see Cluster#initialize
#
module Cluster
- @@number_of_nodes = (ENV['TEST_CLUSTER_NODES'] || 2).to_i
- @@default_cluster_name = "elasticsearch-test-#{Socket.gethostname.downcase}"
# Starts a cluster
#
- # Launches the specified number of nodes in test-suitable configuration by default
- # and prints information about the cluster -- unless this specific cluster is running already.
+ # @see Cluster#start
#
- # Use the {Cluster#stop Cluster.stop} command with the same arguments to stop this cluster.
- #
- # @option arguments [String] :cluster_name Cluster name (default: `elasticsearch_test`)
- # @option arguments [Integer] :nodes Number of desired nodes (default: 2)
- # @option arguments [String] :command Elasticsearch command (default: `elasticsearch`)
- # @option arguments [String] :port Starting port number; will be auto-incremented (default: 9250)
- # @option arguments [String] :node_name The node name (will be appended with a number)
- # @option arguments [String] :path_data Path to the directory to store data in
- # @option arguments [String] :path_work Path to the directory with auxiliary files
- # @option arguments [Boolean] :multicast_enabled Whether multicast is enabled (default: true)
- # @option arguments [Integer] :timeout Timeout when starting the cluster (default: 30)
+ def start(arguments={})
+ Cluster.new(arguments).start
+ end
+
+ # Stops a cluster
#
- # You can also use environment variables to set these options.
+ # @see Cluster#stop
#
- # @example Start a cluster with default configuration (2 nodes, in-memory, etc)
- # Elasticsearch::Extensions::Test::Cluster.start
+ def stop(arguments={})
+ Cluster.new(arguments).stop
+ end
+
+ # Returns true when a specific test node is running within the cluster
#
- # @example Start a cluster with a custom configuration
- # Elasticsearch::Extensions::Test::Cluster.start \
- # cluster_name: 'my-cluster',
- # nodes: 3,
- # node_name: 'my-node',
- # port: 9350
+ # @see Cluster#running?
#
- # @example Start a cluster with a different Elasticsearch version
- # Elasticsearch::Extensions::Test::Cluster.start \
- # command: "/usr/local/Cellar/elasticsearch/1.0.0.Beta2/bin/elasticsearch"
+ def running?(arguments={})
+ Cluster.new(arguments).running?
+ end
+
+ # Waits until the cluster is green and prints information
#
- # @return Boolean
- # @see Cluster#stop Cluster.stop
+ # @see Cluster#wait_for_green
#
- def start(arguments={})
- @@number_of_nodes = ( ENV.fetch('TEST_CLUSTER_NODES', arguments[:nodes] || 2) ).to_i
-
- arguments[:command] ||= ENV.fetch('TEST_CLUSTER_COMMAND', 'elasticsearch')
- arguments[:port] ||= (ENV.fetch('TEST_CLUSTER_PORT', 9250).to_i)
- arguments[:cluster_name] ||= (ENV.fetch('TEST_CLUSTER_NAME', @@default_cluster_name).chomp)
- arguments[:node_name] ||= ENV.fetch('TEST_CLUSTER_NODE_NAME', 'node')
- arguments[:path_data] ||= ENV.fetch('TEST_CLUSTER_DATA', '/tmp/elasticsearch_test')
- arguments[:path_work] ||= ENV.fetch('TEST_CLUSTER_TMP', '/tmp')
- arguments[:es_params] ||= ENV.fetch('TEST_CLUSTER_PARAMS', '')
- arguments[:multicast_enabled] ||= ENV.fetch('TEST_CLUSTER_MULTICAST', 'true')
- arguments[:timeout] ||= (ENV.fetch('TEST_CLUSTER_TIMEOUT', 30).to_i)
-
- # Make sure `cluster_name` is not dangerous
- if arguments[:cluster_name] =~ /^[\/\\]?$/
- raise ArgumentError, "The `cluster_name` parameter cannot be empty string or a slash"
- end
-
- if running? :on => arguments[:port], :as => arguments[:cluster_name]
- print "[!] Elasticsearch cluster already running".ansi(:red)
- wait_for_green(arguments[:port], arguments[:timeout])
- return false
- end
+ def wait_for_green(arguments={})
+ Cluster.new(arguments).wait_for_green
+ end
- # Wipe out data for this cluster name
- FileUtils.rm_rf "#{arguments[:path_data]}/#{arguments[:cluster_name]}"
+ module_function :start, :stop, :running?, :wait_for_green
- print "Starting ".ansi(:faint) +
- @@number_of_nodes.to_s.ansi(:bold, :faint) +
- " Elasticsearch nodes..".ansi(:faint)
+ class Cluster
+ attr_reader :arguments
- pids = []
+ COMMANDS = {
+ '0.90' => lambda { |arguments, node_number|
+ <<-COMMAND.gsub(/ /, '')
+ #{arguments[:command]} \
+ -f \
+ -D es.cluster.name=#{arguments[:cluster_name]} \
+ -D es.node.name=#{arguments[:node_name]}-#{node_number} \
+ -D es.http.port=#{arguments[:port].to_i + (node_number-1)} \
+ -D es.path.data=#{arguments[:path_data]} \
+ -D es.path.work=#{arguments[:path_work]} \
+ -D es.path.logs=#{arguments[:path_logs]} \
+ -D es.cluster.routing.allocation.disk.threshold_enabled=false \
+ -D es.network.host=#{arguments[:network_host]} \
+ -D es.discovery.zen.ping.multicast.enabled=#{arguments[:multicast_enabled]} \
+ -D es.script.inline=true \
+ -D es.script.indexed=true \
+ -D es.node.test=true \
+ -D es.node.testattr=test \
+ -D es.node.bench=true \
+ -D es.path.repo=/tmp \
+ -D es.repositories.url.allowed_urls=http://snapshot.test* \
+ -D es.logger.level=DEBUG \
+ #{arguments[:es_params]} \
+ > /dev/null
+ COMMAND
+ },
- @@number_of_nodes.times do |n|
- n += 1
- command = <<-COMMAND
- #{arguments[:command]} \
+ '1.0' => lambda { |arguments, node_number|
+ <<-COMMAND.gsub(/ /, '')
+ #{arguments[:command]} \
-D es.foreground=yes \
-D es.cluster.name=#{arguments[:cluster_name]} \
- -D es.node.name=#{arguments[:node_name]}-#{n} \
- -D es.http.port=#{arguments[:port].to_i + (n-1)} \
+ -D es.node.name=#{arguments[:node_name]}-#{node_number} \
+ -D es.http.port=#{arguments[:port].to_i + (node_number-1)} \
-D es.path.data=#{arguments[:path_data]} \
-D es.path.work=#{arguments[:path_work]} \
+ -D es.path.logs=#{arguments[:path_logs]} \
-D es.cluster.routing.allocation.disk.threshold_enabled=false \
- -D es.network.host=0.0.0.0 \
+ -D es.network.host=#{arguments[:network_host]} \
-D es.discovery.zen.ping.multicast.enabled=#{arguments[:multicast_enabled]} \
-D es.script.inline=on \
-D es.script.indexed=on \
-D es.node.test=true \
+ -D es.node.testattr=test \
-D es.node.bench=true \
- -D es.logger.level=DEBUG \
+ -D es.path.repo=/tmp \
+ -D es.repositories.url.allowed_urls=http://snapshot.test* \
+ -D es.logger.level=#{ENV['DEBUG'] ? 'DEBUG' : 'INFO'} \
#{arguments[:es_params]} \
> /dev/null
- COMMAND
- STDERR.puts command.gsub(/ {1,}/, ' ') if ENV['DEBUG']
+ COMMAND
+ },
- pid = Process.spawn(command)
- Process.detach pid
- pids << pid
+ '2.0' => lambda { |arguments, node_number|
+ <<-COMMAND.gsub(/ /, '')
+ #{arguments[:command]} \
+ -D es.foreground=yes \
+ -D es.cluster.name=#{arguments[:cluster_name]} \
+ -D es.node.name=#{arguments[:node_name]}-#{node_number} \
+ -D es.http.port=#{arguments[:port].to_i + (node_number-1)} \
+ -D es.path.data=#{arguments[:path_data]} \
+ -D es.path.work=#{arguments[:path_work]} \
+ -D es.path.logs=#{arguments[:path_logs]} \
+ -D es.cluster.routing.allocation.disk.threshold_enabled=false \
+ -D es.network.host=#{arguments[:network_host]} \
+ -D es.script.inline=true \
+ -D es.script.stored=true \
+ -D es.node.attr.testattr=test \
+ -D es.path.repo=/tmp \
+ -D es.repositories.url.allowed_urls=http://snapshot.test* \
+ -D es.logger.level=DEBUG \
+ #{arguments[:es_params]} \
+ > /dev/null
+ COMMAND
+ },
+
+ '5.0' => lambda { |arguments, node_number|
+ <<-COMMAND.gsub(/ /, '')
+ #{arguments[:command]} \
+ -E cluster.name=#{arguments[:cluster_name]} \
+ -E node.name=#{arguments[:node_name]}-#{node_number} \
+ -E http.port=#{arguments[:port].to_i + (node_number-1)} \
+ -E path.data=#{arguments[:path_data]} \
+ -E path.logs=#{arguments[:path_logs]} \
+ -E cluster.routing.allocation.disk.threshold_enabled=false \
+ -E network.host=#{arguments[:network_host]} \
+ -E script.inline=true \
+ -E script.stored=true \
+ -E node.attr.testattr=test \
+ -E path.repo=/tmp \
+ -E repositories.url.allowed_urls=http://snapshot.test* \
+ -E discovery.zen.minimum_master_nodes=#{arguments[:number_of_nodes]-1} \
+ -E logger.level=DEBUG \
+ #{arguments[:es_params]} \
+ > /dev/null
+ COMMAND
+ }
+ }
+
+ # Create a new instance of the Cluster class
+ #
+ # @option arguments [String] :cluster_name Cluster name (default: `elasticsearch_test`)
+ # @option arguments [Integer] :nodes Number of desired nodes (default: 2)
+ # @option arguments [String] :command Elasticsearch command (default: `elasticsearch`)
+ # @option arguments [String] :port Starting port number; will be auto-incremented (default: 9250)
+ # @option arguments [String] :node_name The node name (will be appended with a number)
+ # @option arguments [String] :path_data Path to the directory to store data in
+ # @option arguments [String] :path_work Path to the directory with auxiliary files
+ # @option arguments [String] :path_logs Path to the directory with log files
+ # @option arguments [Boolean] :multicast_enabled Whether multicast is enabled (default: true)
+ # @option arguments [Integer] :timeout Timeout when starting the cluster (default: 30)
+ # @option arguments [String] :network_host The host that nodes will bind on and publish to
+ # @option arguments [Boolean] :clear_cluster Wipe out cluster content on startup (default: true)
+ #
+ # You can also use environment variables to set the constructor options (see source).
+ #
+ # @see Cluster#start
+ #
+ def initialize(arguments={})
+ @arguments = arguments
+
+ @arguments[:command] ||= ENV.fetch('TEST_CLUSTER_COMMAND', 'elasticsearch')
+ @arguments[:port] ||= ENV.fetch('TEST_CLUSTER_PORT', 9250).to_i
+ @arguments[:cluster_name] ||= ENV.fetch('TEST_CLUSTER_NAME', __default_cluster_name).chomp
+ @arguments[:node_name] ||= ENV.fetch('TEST_CLUSTER_NODE_NAME', 'node')
+ @arguments[:path_data] ||= ENV.fetch('TEST_CLUSTER_DATA', '/tmp/elasticsearch_test')
+ @arguments[:path_work] ||= ENV.fetch('TEST_CLUSTER_TMP', '/tmp')
+ @arguments[:path_logs] ||= ENV.fetch('TEST_CLUSTER_LOGS', '/tmp/log/elasticsearch')
+ @arguments[:es_params] ||= ENV.fetch('TEST_CLUSTER_PARAMS', '')
+ @arguments[:multicast_enabled] ||= ENV.fetch('TEST_CLUSTER_MULTICAST', 'true')
+ @arguments[:timeout] ||= ENV.fetch('TEST_CLUSTER_TIMEOUT', 30).to_i
+ @arguments[:number_of_nodes] ||= ENV.fetch('TEST_CLUSTER_NODES', 2).to_i
+ @arguments[:network_host] ||= ENV.fetch('TEST_CLUSTER_NETWORK_HOST', __default_network_host)
+
+ @clear_cluster = !!@arguments[:clear_cluster] || (ENV.fetch('TEST_CLUSTER_CLEAR', 'true') != 'false')
+
+ # Make sure `cluster_name` is not dangerous
+ raise ArgumentError, "The `cluster_name` argument cannot be empty string or a slash" \
+ if @arguments[:cluster_name] =~ /^[\/\\]?$/
end
- # Check for proceses running
- if `ps -p #{pids.join(' ')}`.split("\n").size < @@number_of_nodes+1
- STDERR.puts "", "[!!!] Process failed to start (see output above)".ansi(:red)
- exit(1)
- end
+ # Starts a cluster
+ #
+ # Launches the specified number of nodes in a test-suitable configuration and prints
+ # information about the cluster -- unless this specific cluster is already running.
+ #
+ # @example Start a cluster with the default configuration (2 nodes, installed version, etc)
+ # Elasticsearch::Extensions::Test::Cluster::Cluster.new.start
+ #
+ # @example Start a cluster with a custom configuration
+ # Elasticsearch::Extensions::Test::Cluster::Cluster.new(
+ # cluster_name: 'my-cluster',
+ # nodes: 3,
+ # node_name: 'my-node',
+ # port: 9350
+ # ).start
+ #
+ # @example Start a cluster with a different Elasticsearch version
+ # Elasticsearch::Extensions::Test::Cluster::Cluster.new(
+ # command: "/usr/local/Cellar/elasticsearch/1.0.0.Beta2/bin/elasticsearch"
+ # ).start
+ #
+ # @return Boolean,Array
+ # @see Cluster#stop
+ #
+ def start
+ if self.running?
+ STDOUT.print "[!] Elasticsearch cluster already running".ansi(:red)
+ return false
+ end
- wait_for_green(arguments[:port], arguments[:timeout])
- return true
- end
+ __remove_cluster_data
- # Stop the cluster.
- #
- # Fetches the PID numbers from "Nodes Info" API and terminates matching nodes.
- #
- # @example Stop the default cluster
- # Elasticsearch::Extensions::Test::Cluster.stop
- #
- # @example Stop the cluster reachable on specific port
- # Elasticsearch::Extensions::Test::Cluster.stop port: 9350
- #
- # @return Boolean
- # @see Cluster#start Cluster.start
- #
- def stop(arguments={})
- arguments[:port] ||= (ENV['TEST_CLUSTER_PORT'] || 9250).to_i
+ STDOUT.print "Starting ".ansi(:faint) + arguments[:number_of_nodes].to_s.ansi(:bold, :faint) +
+ " Elasticsearch nodes..".ansi(:faint)
+ pids = []
- nodes = begin
- JSON.parse(Net::HTTP.get(URI("http://localhost:#{arguments[:port]}/_nodes/?process")))
- rescue Exception => e
- STDERR.puts "[!] Exception raised when stopping the cluster: #{e.inspect}".ansi(:red)
- nil
- end
+ STDERR.puts "Using Elasticsearch version [#{version}]" if ENV['DEBUG']
- return false if nodes.nil? or nodes.empty?
+ arguments[:number_of_nodes].times do |n|
+ n += 1
+ command = __command(version, arguments, n)
+ STDERR.puts command.gsub(/ {1,}/, ' ') if ENV['DEBUG']
- pids = nodes['nodes'].map { |id, info| info['process']['id'] }
+ pid = Process.spawn(command)
+ Process.detach pid
+ pids << pid
+ end
- unless pids.empty?
- print "\nStopping Elasticsearch nodes... ".ansi(:faint)
- pids.each_with_index do |pid, i|
- ['INT','KILL'].each do |signal|
- begin
- Process.kill signal, pid
- rescue Exception => e
- print "[#{e.class}] PID #{pid} not found. ".ansi(:red)
- end
+ __check_for_running_processes(pids)
+ wait_for_green
+ __print_cluster_info
- # Give the system some breathing space to finish...
- sleep 1
+ return true
+ end
- # Check that pid really is dead
- begin
- Process.getpgid( pid )
- # `getpgid` will raise error if pid is dead, so if we get here, try next signal.
- next
- rescue Errno::ESRCH
- print "stopped PID #{pid} with #{signal} signal. ".ansi(:green)
- break # pid is dead
+ # Stops the cluster
+ #
+ # Fetches the PID numbers from "Nodes Info" API and terminates matching nodes.
+ #
+ # @example Stop the default cluster
+ # Elasticsearch::Extensions::Test::Cluster::Cluster.new.stop
+ #
+ # @example Stop the cluster reachable on specific port
+ # Elasticsearch::Extensions::Test::Cluster::Cluster.new(port: 9350).stop
+ #
+ # @return Boolean,Array
+ # @see Cluster#start
+ #
+ def stop
+ begin
+ nodes = __get_nodes
+ rescue Exception => e
+ STDERR.puts "[!] Exception raised when stopping the cluster: #{e.inspect}".ansi(:red)
+ nil
+ end
+
+ return false if nodes.nil? or nodes.empty?
+
+ pids = nodes['nodes'].map { |id, info| info['process']['id'] }
+
+ unless pids.empty?
+ STDOUT.print "\nStopping Elasticsearch nodes... ".ansi(:faint)
+ pids.each_with_index do |pid, i|
+ ['INT','KILL'].each do |signal|
+ begin
+ Process.kill signal, pid
+ rescue Exception => e
+ STDOUT.print "[#{e.class}] PID #{pid} not found. ".ansi(:red)
+ end
+
+ # Give the system some breathing space to finish...
+ Kernel.sleep 1
+
+ # Check that pid really is dead
+ begin
+ Process.getpgid pid
+ # `getpgid` will raise error if pid is dead, so if we get here, try next signal
+ next
+ rescue Errno::ESRCH
+ STDOUT.print "Stopped PID #{pid}".ansi(:green) +
+ (ENV['DEBUG'] ? " with #{signal} signal".ansi(:green) : '') +
+ ". ".ansi(:green)
+ break # pid is dead
+ end
end
end
+ STDOUT.puts
+ else
+ return false
end
- puts
- else
- false
+
+ return pids
end
- return pids
- end
+ # Returns true when a specific test node is running within the cluster
+ #
+ # @return Boolean
+ #
+ def running?
+ if cluster_health = Timeout::timeout(0.25) { __get_cluster_health } rescue nil
+ return cluster_health['cluster_name'] == arguments[:cluster_name] && \
+ cluster_health['number_of_nodes'] == arguments[:number_of_nodes]
+ end
+ return false
+ end
- # Returns true when a specific test node is running within the cluster.
- #
- # @option arguments [Integer] :on The port on which the node is running.
- # @option arguments [String] :as The cluster name.
- #
- # @return Boolean
- #
- def running?(arguments={})
- port = arguments[:on] || (ENV['TEST_CLUSTER_PORT'] || 9250).to_i
- cluster_name = arguments[:as] || ENV['TEST_CLUSTER_NAME'] || 'elasticsearch_test'
+ # Waits until the cluster is green and prints information about it
+ #
+ # @return Boolean
+ #
+ def wait_for_green
+ __wait_for_status('green', 60)
+ end
- if cluster_health = Timeout::timeout(0.25) { __get_cluster_health(port) } rescue nil
- return cluster_health['cluster_name'] == cluster_name && \
- cluster_health['number_of_nodes'] == @@number_of_nodes
+ # Returns the major version of Elasticsearch
+ #
+ # @return String
+ # @see __determine_version
+ #
+ def version
+ @version ||= __determine_version
end
- return false
- end
- # Waits until the cluster is green and prints information
- #
- # @example Print the information about the default cluster
- # Elasticsearch::Extensions::Test::Cluster.wait_for_green
- #
- # @param (see #__wait_for_status)
- #
- # @return Boolean
- #
- def wait_for_green(port=9250, timeout=60)
- __wait_for_status('green', port, timeout)
- end
- # Blocks the process and waits for the cluster to be in a "green" state.
- #
- # Prints information about the cluster on STDOUT if the cluster is available.
- #
- # @param status [String] The status to wait for (yellow, green)
- # @param port [Integer] The port on which the cluster is reachable
- # @param timeout [Integer] The explicit timeout for the operation
- #
- # @api private
- #
- # @return Boolean
- #
- def __wait_for_status(status='green', port=9250, timeout=30)
- uri = URI("http://localhost:#{port}/_cluster/health?wait_for_status=#{status}")
-
- Timeout::timeout(timeout) do
- loop do
- response = begin
- JSON.parse(Net::HTTP.get(uri))
- rescue Exception => e
- STDERR.puts e.inspect if ENV['DEBUG']
- nil
+ # Returns default `:network_host` setting based on the version
+ #
+ # @api private
+ #
+ # @return String
+ #
+ def __default_network_host
+ case version
+ when /^0|^1/
+ '0.0.0.0'
+ when /^2/
+ '0.0.0.0'
+ when /^5/
+ '_local_'
+ else
+ raise RuntimeError, "Cannot determine default network host from version [#{version}]"
+ end
+ end
+
+ # Returns a reasonably unique cluster name
+ #
+ # @api private
+ #
+ # @return String
+ #
+ def __default_cluster_name
+ "elasticsearch-test-#{Socket.gethostname.downcase}"
+ end
+
+ # Returns the HTTP URL for the cluster based on `:network_host` setting
+ #
+ # @api private
+ #
+ # @return String
+ #
+ def __cluster_url
+ if '_local_' == arguments[:network_host]
+ "http://localhost:#{arguments[:port]}"
+ else
+ "http://#{arguments[:network_host]}:#{arguments[:port]}"
+ end
+ end
+
+ # Determine Elasticsearch version to be launched
+ #
+ # Tries to parse the version number from the `lib/elasticsearch-X.Y.Z.jar` file,
+ # if not available, uses `elasticsearch --version` or `elasticsearch -v`
+ #
+ # @api private
+ #
+ # @return String
+ #
+ def __determine_version
+ path_to_lib = File.dirname(arguments[:command]) + '/../lib/'
+
+ jar = Dir.entries(path_to_lib).select { |f| f.start_with? 'elasticsearch' }.first if File.exist? path_to_lib
+
+ version = if jar
+ if m = jar.match(/elasticsearch\-(\d+\.\d+.\d+).*/)
+ m[1]
+ else
+ raise RuntimeError, "Cannot determine Elasticsearch version from jar [#{jar}]"
+ end
+ else
+ STDERR.puts "[!] Cannot find Elasticsearch .jar from path to command [#{arguments[:command]}], using `elasticsearch --version`" if ENV['DEBUG']
+
+ output = ''
+
+ begin
+ # First, try the new `--version` syntax...
+ STDERR.puts "Running [#{arguments[:command]} --version] to determine version" if ENV['DEBUG']
+ Timeout::timeout(10) { output = `#{arguments[:command]} --version` }
+ rescue Timeout::Error
+ # ...else, the new `-v` syntax
+ STDERR.puts "Running [#{arguments[:command]} -v] to determine version" if ENV['DEBUG']
+ output = `#{arguments[:command]} -v`
+ end
+
+ STDERR.puts "> #{output}" if ENV['DEBUG']
+
+ if output.empty?
+ raise RuntimeError, "Cannot determine Elasticsearch version from [#{arguments[:command]} --version] or [#{arguments[:command]} -v]"
end
- STDERR.puts response.inspect if response && ENV['DEBUG']
+ if m = output.match(/Version: (\d\.\d.\d).*,/)
+ m[1]
+ else
+ raise RuntimeError, "Cannot determine Elasticsearch version from elasticsearch --version output [#{output}]"
+ end
+ end
+
+ case version
+ when /^0\.90.*/
+ '0.90'
+ when /^1\..*/
+ '1.0'
+ when /^2\..*/
+ '2.0'
+ when /^5\..*/
+ '5.0'
+ else
+ raise RuntimeError, "Cannot determine major version from [#{version}]"
+ end
+ end
+
+ # Returns the launch command for a specific version
+ #
+ # @api private
+ #
+ # @return String
+ #
+ def __command(version, arguments, node_number)
+ if command = COMMANDS[version]
+ command.call(arguments, node_number)
+ else
+ raise ArgumentError, "Cannot find command for version [#{version}]"
+ end
+ end
- if response && response['status'] == status && ( @@number_of_nodes.nil? || @@number_of_nodes == response['number_of_nodes'].to_i )
- __print_cluster_info(port) and break
+ # Blocks the process and waits for the cluster to be in a "green" state
+ #
+ # Prints information about the cluster on STDOUT if the cluster is available.
+ #
+ # @param status [String] The status to wait for (yellow, green)
+ # @param timeout [Integer] The explicit timeout for the operation
+ #
+ # @api private
+ #
+ # @return Boolean
+ #
+ def __wait_for_status(status='green', timeout=30)
+ Timeout::timeout(timeout) do
+ loop do
+ response = __get_cluster_health(status)
+
+ if response && response['status'] == status && ( arguments[:number_of_nodes].nil? || arguments[:number_of_nodes].to_i == response['number_of_nodes'].to_i )
+ break
+ end
+
+ STDOUT.print '.'.ansi(:faint)
+ sleep 1
end
+ end
- print '.'.ansi(:faint)
- sleep 1
+ return true
+ end
+
+ # Print information about the cluster on STDOUT
+ #
+ # @api private
+ #
+ # @return Nil
+ #
+ def __print_cluster_info
+ health = JSON.parse(Net::HTTP.get(URI("#{__cluster_url}/_cluster/health")))
+ nodes = if version == '0.90'
+ JSON.parse(Net::HTTP.get(URI("#{__cluster_url}/_nodes/?process&http")))
+ else
+ JSON.parse(Net::HTTP.get(URI("#{__cluster_url}/_nodes/process,http")))
+ end
+ master = JSON.parse(Net::HTTP.get(URI("#{__cluster_url}/_cluster/state")))['master_node']
+
+ puts "\n",
+ ('-'*80).ansi(:faint),
+ 'Cluster: '.ljust(20).ansi(:faint) + health['cluster_name'].to_s.ansi(:faint),
+ 'Status: '.ljust(20).ansi(:faint) + health['status'].to_s.ansi(:faint),
+ 'Nodes: '.ljust(20).ansi(:faint) + health['number_of_nodes'].to_s.ansi(:faint)
+
+ nodes['nodes'].each do |id, info|
+ m = id == master ? '*' : '+'
+ puts ''.ljust(20) +
+ "#{m} ".ansi(:faint) +
+ "#{info['name'].ansi(:bold)} ".ansi(:faint) +
+ "| version: #{info['version'] rescue 'N/A'}, ".ansi(:faint) +
+ "pid: #{info['process']['id'] rescue 'N/A'}, ".ansi(:faint) +
+ "address: #{info['http']['bound_address'] rescue 'N/A'}".ansi(:faint)
end
end
- return true
- end
+ # Tries to load cluster health information
+ #
+ # @api private
+ #
+ # @return Hash,Nil
+ #
+ def __get_cluster_health(status=nil)
+ uri = URI("#{__cluster_url}/_cluster/health")
+ uri.query = "wait_for_status=#{status}" if status
+
+ begin
+ response = Net::HTTP.get(uri)
+ rescue Exception => e
+ STDERR.puts e.inspect if ENV['DEBUG']
+ return nil
+ end
- # Print information about the cluster on STDOUT
- #
- # @api private
- #
- def __print_cluster_info(port)
- health = JSON.parse(Net::HTTP.get(URI("http://localhost:#{port}/_cluster/health")))
- nodes = JSON.parse(Net::HTTP.get(URI("http://localhost:#{port}/_nodes/process,http")))
- master = JSON.parse(Net::HTTP.get(URI("http://localhost:#{port}/_cluster/state")))['master_node']
-
- puts "\n",
- ('-'*80).ansi(:faint),
- 'Cluster: '.ljust(20).ansi(:faint) + health['cluster_name'].to_s.ansi(:faint),
- 'Status: '.ljust(20).ansi(:faint) + health['status'].to_s.ansi(:faint),
- 'Nodes: '.ljust(20).ansi(:faint) + health['number_of_nodes'].to_s.ansi(:faint)
-
- nodes['nodes'].each do |id, info|
- m = id == master ? '*' : '+'
- puts ''.ljust(20) +
- "#{m} ".ansi(:faint) +
- "#{info['name'].ansi(:bold)} ".ansi(:faint) +
- "| version: #{info['version'] rescue 'N/A'}, ".ansi(:faint) +
- "pid: #{info['process']['id'] rescue 'N/A'}, ".ansi(:faint) +
- "address: #{info['http']['bound_address'] rescue 'N/A'}".ansi(:faint)
+ JSON.parse(response)
end
- end
- # Tries to load cluster health information
- #
- # @api private
- #
- def __get_cluster_health(port=9250)
- uri = URI("http://localhost:#{port}/_cluster/health")
- if response = Net::HTTP.get(uri) rescue nil
- return JSON.parse(response)
+ # Remove the data directory (unless it has been disabled by arguments)
+ #
+ # @api private
+ #
+ def __remove_cluster_data
+ # Wipe out data on disk for this cluster name by default
+ FileUtils.rm_rf "#{arguments[:path_data]}/#{arguments[:cluster_name]}" if @clear_cluster
end
- end
- extend self
+
+ # Check whether processes for the PIDs are running
+ #
+ # @api private
+ #
+ def __check_for_running_processes(pids)
+ if `ps -p #{pids.join(' ')}`.split("\n").size < arguments[:number_of_nodes]+1
+ STDERR.puts "", "[!!!] Process failed to start (see output above)".ansi(:red)
+ exit(1)
+ end
+ end
+
+ # Get the information about nodes
+ #
+ # @api private
+ #
+ def __get_nodes
+ JSON.parse(Net::HTTP.get(URI("#{__cluster_url}/_nodes/process")))
+ end
+ end
end
end
end
diff --git a/elasticsearch-extensions/lib/elasticsearch/extensions/version.rb b/elasticsearch-extensions/lib/elasticsearch/extensions/version.rb
index c3046cc..2a292f8 100644
--- a/elasticsearch-extensions/lib/elasticsearch/extensions/version.rb
+++ b/elasticsearch-extensions/lib/elasticsearch/extensions/version.rb
@@ -1,5 +1,5 @@
module Elasticsearch
module Extensions
- VERSION = "0.0.18"
+ VERSION = "0.0.22"
end
end
diff --git a/elasticsearch-extensions/test/reindex/integration/reindex_test.rb b/elasticsearch-extensions/test/reindex/integration/reindex_test.rb
new file mode 100644
index 0000000..86bf31a
--- /dev/null
+++ b/elasticsearch-extensions/test/reindex/integration/reindex_test.rb
@@ -0,0 +1,81 @@
+require 'test_helper'
+require 'elasticsearch/extensions/reindex'
+
+class Elasticsearch::Extensions::ReindexIntegrationTest < Elasticsearch::Test::IntegrationTestCase
+ context "The Reindex extension" do
+ setup do
+ @port = (ENV['TEST_CLUSTER_PORT'] || 9250).to_i
+
+ @logger = ::Logger.new(STDERR)
+ @logger.formatter = proc do |severity, datetime, progname, msg|
+ color = case severity
+ when /INFO/ then :green
+ when /ERROR|WARN|FATAL/ then :red
+ when /DEBUG/ then :cyan
+ else :white
+ end
+ ANSI.ansi(severity[0] + ' ', color, :faint) + ANSI.ansi(msg, :white, :faint) + "\n"
+ end
+
+ @client = Elasticsearch::Client.new host: "localhost:#{@port}", logger: @logger
+ @client.indices.delete index: '_all'
+
+ @client.index index: 'test1', type: 'd', id: 1, body: { title: 'TEST 1', category: 'one' }
+ @client.index index: 'test1', type: 'd', id: 2, body: { title: 'TEST 2', category: 'two' }
+ @client.index index: 'test1', type: 'd', id: 3, body: { title: 'TEST 3', category: 'three' }
+ @client.indices.refresh index: 'test1'
+
+ @client.cluster.health wait_for_status: 'yellow'
+ end
+
+ should "copy documents from one index to another" do
+ reindex = Elasticsearch::Extensions::Reindex.new \
+ source: { index: 'test1', client: @client },
+ target: { index: 'test2' },
+ refresh: true
+
+ result = reindex.perform
+
+ assert_equal 0, result[:errors]
+ assert_equal 3, @client.search(index: 'test2')['hits']['total']
+ end
+
+ should "transform documents with a lambda" do
+ reindex = Elasticsearch::Extensions::Reindex.new \
+ source: { index: 'test1', client: @client },
+ target: { index: 'test2' },
+ transform: lambda { |d| d['_source']['category'].upcase! },
+ refresh: true
+
+ result = reindex.perform
+
+ assert_equal 0, result[:errors]
+ assert_equal 3, @client.search(index: 'test2')['hits']['total']
+ assert_equal 'ONE', @client.get(index: 'test2', type: 'd', id: 1)['_source']['category']
+ end
+
+ should "return the number of errors" do
+ @client.indices.create index: 'test3', body: { mappings: { d: { properties: { category: { type: 'integer' } }}}}
+ @client.cluster.health wait_for_status: 'yellow'
+
+ reindex = Elasticsearch::Extensions::Reindex.new \
+ source: { index: 'test1', client: @client },
+ target: { index: 'test3', transform: lambda { |d| d['_source']['category'].upcase!; d } },
+ refresh: true
+
+ result = reindex.perform
+
+ assert_equal 3, result[:errors]
+ assert_equal 0, @client.search(index: 'test3')['hits']['total']
+ end
+
+ should "reindex via the API integration" do
+ @client.reindex source: { index: 'test1' }, target: { index: 'test4' }
+
+ @client.indices.refresh index: 'test4'
+
+ assert_equal 3, @client.search(index: 'test4')['hits']['total']
+ end
+ end
+
+end
diff --git a/elasticsearch-extensions/test/reindex/unit/reindex_test.rb b/elasticsearch-extensions/test/reindex/unit/reindex_test.rb
new file mode 100644
index 0000000..7bf3a38
--- /dev/null
+++ b/elasticsearch-extensions/test/reindex/unit/reindex_test.rb
@@ -0,0 +1,106 @@
+require 'test_helper'
+require 'elasticsearch/extensions/reindex'
+
+class Elasticsearch::Extensions::ReindexTest < Test::Unit::TestCase
+ context "The Reindex extension module" do
+ DEFAULT_OPTIONS = { source: { index: 'foo', client: Object.new }, target: { index: 'bar' } }
+
+ should "require options" do
+ assert_raise ArgumentError do
+ Elasticsearch::Extensions::Reindex.new
+ end
+ end
+
+ should "allow to initialize the class" do
+ assert_instance_of Elasticsearch::Extensions::Reindex::Reindex,
+ Elasticsearch::Extensions::Reindex.new(DEFAULT_OPTIONS)
+ end
+
+ should "add the reindex to the API and client" do
+ assert_includes Elasticsearch::API::Actions.public_instance_methods.sort, :reindex
+ assert_respond_to Elasticsearch::Client.new, :reindex
+ end
+
+ should "pass the client when used in API mode" do
+ client = Elasticsearch::Client.new
+
+ Elasticsearch::Extensions::Reindex::Reindex
+ .expects(:new)
+ .with({source: { client: client }})
+ .returns(stub perform: {})
+
+ client.reindex
+ end
+
+ context "when performing the operation" do
+ setup do
+ d = { '_id' => 'foo', '_type' => 'type', '_source' => { 'foo' => 'bar' } }
+ @default_response = { 'hits' => { 'hits' => [d] } }
+ @empty_response = { 'hits' => { 'hits' => [] } }
+ @bulk_request = [{ index: {
+ '_index' => 'bar',
+ '_type' => d['_type'],
+ '_id' => d['_id'],
+ 'data' => d['_source']
+ } }]
+ @bulk_response = {'errors'=>false, 'items' => [{'index' => {}}]}
+ @bulk_response_error = {'errors'=>true, 'items' => [{'index' => {}},{'index' => {'error' => 'FOOBAR'}}]}
+ end
+
+ should "scroll through the index and save batches in bulk" do
+ client = mock()
+ subject = Elasticsearch::Extensions::Reindex.new source: { index: 'foo', client: client },
+ target: { index: 'bar' }
+
+ client.expects(:search).returns({ '_scroll_id' => 'scroll_id_1' })
+ client.expects(:scroll).returns(@default_response)
+ .then.returns(@empty_response)
+ .times(2)
+ client.expects(:bulk).with(body: @bulk_request).returns(@bulk_response)
+
+ result = subject.perform
+
+ assert_equal 0, result[:errors]
+ end
+
+ should "return the number of errors" do
+ client = mock()
+ subject = Elasticsearch::Extensions::Reindex.new source: { index: 'foo', client: client },
+ target: { index: 'bar' }
+
+ client.expects(:search).returns({ '_scroll_id' => 'scroll_id_1' })
+ client.expects(:scroll).returns(@default_response)
+ .then.returns(@empty_response)
+ .times(2)
+ client.expects(:bulk).with(body: @bulk_request).returns(@bulk_response_error)
+
+ result = subject.perform
+
+ assert_equal 1, result[:errors]
+ end
+
+ should "transform the documents with a lambda" do
+ client = mock()
+ subject = Elasticsearch::Extensions::Reindex.new \
+ source: { index: 'foo', client: client },
+ target: { index: 'bar' },
+ transform: lambda { |d| d['_source']['foo'].upcase!; d }
+
+ client.expects(:search).returns({ '_scroll_id' => 'scroll_id_1' })
+ client.expects(:scroll).returns(@default_response)
+ .then.returns(@empty_response)
+ .times(2)
+ client.expects(:bulk).with do |arguments|
+ assert_equal 'BAR', arguments[:body][0][:index]['data']['foo']
+ true
+ end
+ .returns(@bulk_response)
+
+ result = subject.perform
+
+ assert_equal 0, result[:errors]
+ end
+ end
+
+ end
+end
diff --git a/elasticsearch-extensions/test/test/cluster/integration/cluster_test.rb b/elasticsearch-extensions/test/test/cluster/integration/cluster_test.rb
new file mode 100644
index 0000000..7d0e233
--- /dev/null
+++ b/elasticsearch-extensions/test/test/cluster/integration/cluster_test.rb
@@ -0,0 +1,29 @@
+require 'test_helper'
+require 'pathname'
+
+require 'elasticsearch/extensions/test/cluster'
+
+class Elasticsearch::Extensions::TestClusterIntegrationTest < Test::Unit::TestCase
+ context "The Test::Cluster" do
+ PATH_TO_BUILDS = Pathname(File.expand_path('../../../../../../tmp/builds', __FILE__))
+
+ unless PATH_TO_BUILDS.exist?
+ puts "Path to builds doesn't exist, skipping TestClusterIntegrationTest"
+ exit(0)
+ end
+
+ @builds = begin
+ PATH_TO_BUILDS.entries.reject { |f| f.to_s =~ /^\./ }
+ rescue Errno::ENOENT
+ []
+ end
+
+ @builds.each do |build|
+ should "start and stop #{build.to_s}" do
+ puts ("----- #{build.to_s} " + "-"*(80-7-build.to_s.size)).to_s.ansi(:bold)
+ Elasticsearch::Extensions::Test::Cluster.start command: PATH_TO_BUILDS.join(build.join('bin/elasticsearch')).to_s
+ Elasticsearch::Extensions::Test::Cluster.stop command: PATH_TO_BUILDS.join(build.join('bin/elasticsearch')).to_s
+ end
+ end
+ end
+end
diff --git a/elasticsearch-extensions/test/test/cluster/unit/cluster_test.rb b/elasticsearch-extensions/test/test/cluster/unit/cluster_test.rb
new file mode 100644
index 0000000..bb9bf6f
--- /dev/null
+++ b/elasticsearch-extensions/test/test/cluster/unit/cluster_test.rb
@@ -0,0 +1,280 @@
+require 'test_helper'
+
+require 'elasticsearch/extensions/test/cluster'
+
+class Elasticsearch::Extensions::TestClusterTest < Test::Unit::TestCase
+ include Elasticsearch::Extensions::Test
+ context "The Test::Cluster" do
+ context "module" do
+ should "delegate the methods to the class" do
+ Cluster::Cluster
+ .expects(:new)
+ .with({foo: 'bar'})
+ .returns(mock start: true, stop: true, running?: true, wait_for_green: true)
+ .times(4)
+
+ Elasticsearch::Extensions::Test::Cluster.start foo: 'bar'
+ Elasticsearch::Extensions::Test::Cluster.stop foo: 'bar'
+ Elasticsearch::Extensions::Test::Cluster.running? foo: 'bar'
+ Elasticsearch::Extensions::Test::Cluster.wait_for_green foo: 'bar'
+ end
+ end
+
+ context "class" do
+ setup do
+ Elasticsearch::Extensions::Test::Cluster::Cluster.any_instance.stubs(:__default_network_host).returns('_local_')
+
+ @subject = Elasticsearch::Extensions::Test::Cluster::Cluster.new
+ @subject.stubs(:__remove_cluster_data).returns(true)
+ end
+
+ teardown do
+ ENV.delete('TEST_CLUSTER_PORT')
+ end
+
+ should "be initialized with parameters" do
+ c = Cluster::Cluster.new port: 9400
+
+ assert_equal 9400, c.arguments[:port]
+ end
+
+ should "take parameters from environment variables" do
+ ENV['TEST_CLUSTER_PORT'] = '9400'
+
+ c = Cluster::Cluster.new
+
+ assert_equal 9400, c.arguments[:port]
+ end
+
+ should "raise exception for dangerous cluster name" do
+ assert_raise(ArgumentError) { Cluster::Cluster.new cluster_name: '' }
+ assert_raise(ArgumentError) { Cluster::Cluster.new cluster_name: '/' }
+ end
+
+ should "have a version" do
+ @subject.unstub(:version)
+ @subject.expects(:__determine_version).returns('2.0')
+ assert_equal '2.0', @subject.version
+ end
+
+ should "have a default network host" do
+ Cluster::Cluster.any_instance.unstub(:__default_network_host)
+ Cluster::Cluster.any_instance.stubs(:version).returns('5.0')
+
+ assert_equal '_local_', Cluster::Cluster.new.__default_network_host
+ end
+
+ should "have a default cluster name" do
+ Socket.stubs(:gethostname).returns('FOOBAR')
+
+ assert_equal 'elasticsearch-test-foobar', Cluster::Cluster.new.__default_cluster_name
+ end
+
+ should "have a cluster URL for new versions" do
+ assert_equal 'http://localhost:9250', Cluster::Cluster.new(network_host: '_local_').__cluster_url
+ end
+
+ should "have a cluster URL for old versions" do
+ assert_equal 'http://192.168.1.1:9250', Cluster::Cluster.new(network_host: '192.168.1.1').__cluster_url
+ end
+
+ should "return corresponding command to a version" do
+ assert_match /\-D es\.foreground=yes/, @subject.__command('2.0', @subject.arguments, 1)
+ end
+
+ should "raise an error when a corresponding command cannot be found" do
+ assert_raise ArgumentError do
+ @subject.__command('FOOBAR', @subject.arguments, 1)
+ end
+ end
+
+ context "when starting a cluster, " do
+ should "return false when it's already running" do
+ Process.expects(:spawn).never
+
+ c = Cluster::Cluster.new
+
+ c.expects(:running?).returns(true)
+
+ assert_equal false, c.start
+ end
+
+ should "start the specified number of nodes" do
+ Process.expects(:spawn).times(3)
+ Process.expects(:detach).times(3)
+
+ c = Cluster::Cluster.new number_of_nodes: 3
+
+ c.expects(:running?).returns(false)
+
+ c.unstub(:__remove_cluster_data)
+ c.expects(:__remove_cluster_data).returns(true)
+
+ c.expects(:wait_for_green).returns(true)
+ c.expects(:__check_for_running_processes).returns(true)
+ c.expects(:__determine_version).returns('5.0')
+ c.expects(:__print_cluster_info).returns(true)
+
+ assert_equal true, c.start
+ end
+ end
+
+ context "when stopping a cluster" do
+ setup do
+ @subject = Elasticsearch::Extensions::Test::Cluster::Cluster.new
+ end
+
+ should "print information about an exception" do
+ @subject.expects(:__get_nodes).raises(Errno::ECONNREFUSED)
+
+ assert_nothing_raised do
+ assert_equal false, @subject.stop
+ end
+ end
+
+ should "return false when the nodes are empty" do
+ @subject.expects(:__get_nodes).returns({})
+ assert_equal false, @subject.stop
+ end
+
+ should "kill each node" do
+ @subject.expects(:__get_nodes).returns({'nodes' => { 'n1' => { 'process' => { 'id' => 1 }},
+ 'n2' => { 'process' => { 'id' => 2 }} }})
+
+ Kernel.stubs(:sleep)
+ Process.expects(:kill).with('INT', 1)
+ Process.expects(:kill).with('INT', 2)
+ Process.expects(:getpgid).with(1).raises(Errno::ESRCH)
+ Process.expects(:getpgid).with(2).raises(Errno::ESRCH)
+
+ assert_equal [1, 2], @subject.stop
+ end
+ end
+
+ context "when checking if the cluster is running" do
+ setup do
+ @subject = Elasticsearch::Extensions::Test::Cluster::Cluster.new \
+ cluster_name: 'test',
+ number_of_nodes: 2
+ end
+
+ should "return true" do
+ @subject.expects(:__get_cluster_health).returns({'cluster_name' => 'test', 'number_of_nodes' => 2})
+ assert_equal true, @subject.running?
+ end
+
+ should "return false" do
+ @subject.expects(:__get_cluster_health).returns({'cluster_name' => 'test', 'number_of_nodes' => 1})
+ assert_equal false, @subject.running?
+ end
+ end
+
+ context "when waiting for the green state" do
+ should "return true" do
+ @subject.expects(:__wait_for_status).returns(true)
+ assert_equal true, @subject.wait_for_green
+ end
+ end
+
+ context "when waiting for cluster state" do
+ setup do
+ @subject = Elasticsearch::Extensions::Test::Cluster::Cluster.new \
+ cluster_name: 'test',
+ number_of_nodes: 1
+ end
+
+ should "return true" do
+ @subject.stubs(:__print_cluster_info)
+
+ @subject
+ .expects(:__get_cluster_health)
+ .with('yellow')
+ .returns({'status' => 'yellow', 'cluster_name' => 'test', 'number_of_nodes' => 1})
+
+ @subject.__wait_for_status('yellow')
+ end
+ end
+
+ context "when getting the cluster health" do
+ should "return the response" do
+ Net::HTTP
+ .expects(:get)
+ .with(URI('http://localhost:9250/_cluster/health'))
+ .returns(JSON.dump({'status' => 'yellow', 'cluster_name' => 'test', 'number_of_nodes' => 1}))
+
+ @subject.__get_cluster_health
+ end
+
+ should "wait for status" do
+ Net::HTTP
+ .expects(:get)
+ .with(URI('http://localhost:9250/_cluster/health?wait_for_status=green'))
+ .returns(JSON.dump({'status' => 'yellow', 'cluster_name' => 'test', 'number_of_nodes' => 1}))
+
+ @subject.__get_cluster_health('green')
+ end
+ end
+
+ context "when getting the list of nodes" do
+ should "return the response" do
+ Net::HTTP
+ .expects(:get)
+ .with(URI('http://localhost:9250/_nodes/process'))
+ .returns(JSON.dump({'nodes' => { 'n1' => {}, 'n2' => {} } }))
+
+ assert_equal 'n1', @subject.__get_nodes['nodes'].keys.first
+ end
+ end
+
+ context "when determining a version" do
+ setup do
+ @subject = Elasticsearch::Extensions::Test::Cluster::Cluster.new command: '/foo/bar/bin/elasticsearch'
+ end
+
+ should "return version from lib/elasticsearch.X.Y.Z.jar" do
+ File.expects(:exist?).with('/foo/bar/bin/../lib/').returns(true)
+ Dir.expects(:entries).with('/foo/bar/bin/../lib/').returns(['foo.jar', 'elasticsearch-2.3.0.jar'])
+
+ assert_equal '2.0', @subject.__determine_version
+ end
+
+ should "return version from `elasticsearch -v`" do
+ File.expects(:exist?).with('/foo/bar/bin/../lib/').returns(false)
+
+ @subject.expects(:`)
+ .with("/foo/bar/bin/elasticsearch --version")
+ .returns('Version: 2.3.0-SNAPSHOT, Build: d1c86b0/2016-03-30T10:43:20Z, JVM: 1.8.0_60')
+
+ assert_equal '2.0', @subject.__determine_version
+ end
+
+ should "raise an exception when the version cannot be parsed from .jar" do
+ # Incorrect jar version
+ File.expects(:exist?).with('/foo/bar/bin/../lib/').returns(true)
+ Dir.expects(:entries).with('/foo/bar/bin/../lib/').returns(['elasticsearch-100.jar'])
+
+ assert_raise(RuntimeError) { @subject.__determine_version }
+ end
+
+ should "raise an exception when the version cannot be parsed from command output" do
+ File.expects(:exist?).with('/foo/bar/bin/../lib/').returns(false)
+
+ @subject.expects(:`)
+ .with("/foo/bar/bin/elasticsearch --version")
+ .returns('Version: FOOBAR')
+
+ assert_raise(RuntimeError) { @subject.__determine_version }
+ end
+
+ should "raise an exception when the version cannot be converted to short version" do
+ # There's no Elasticsearch version 3...
+ File.expects(:exist?).with('/foo/bar/bin/../lib/').returns(true)
+ Dir.expects(:entries).with('/foo/bar/bin/../lib/').returns(['elasticsearch-3.2.1.jar'])
+
+ assert_raise(RuntimeError) { @subject.__determine_version }
+ end
+ end
+ end
+
+ end
+end
diff --git a/elasticsearch-extensions/test/test_helper.rb b/elasticsearch-extensions/test/test_helper.rb
index 2bf62ec..fbbfd7e 100644
--- a/elasticsearch-extensions/test/test_helper.rb
+++ b/elasticsearch-extensions/test/test_helper.rb
@@ -8,7 +8,7 @@ end
if ENV['COVERAGE'] && ENV['CI'].nil? && !RUBY_1_8
require 'simplecov'
- SimpleCov.start { add_filter "/test|test_/" }
+ SimpleCov.start { add_filter "test_" }
end
if ENV['CI'] && !RUBY_1_8
@@ -23,12 +23,24 @@ require 'shoulda-context'
require 'mocha/setup'
require 'ansi/code'
require 'turn' unless ENV["TM_FILEPATH"] || ENV["NOTURN"] || RUBY_1_8
+require 'logger'
require 'elasticsearch/extensions'
+require 'elasticsearch/extensions/test/startup_shutdown'
+require 'elasticsearch/extensions/test/cluster'
module Elasticsearch
module Test
class IntegrationTestCase < ::Test::Unit::TestCase
+ extend Elasticsearch::Extensions::Test::StartupShutdown
+
+ startup do
+ Elasticsearch::Extensions::Test::Cluster.start(nodes: 2) if ENV['SERVER'] and not Elasticsearch::Extensions::Test::Cluster.running?
+ end
+
+ shutdown do
+ Elasticsearch::Extensions::Test::Cluster.stop if ENV['SERVER'] and Elasticsearch::Extensions::Test::Cluster.running?
+ end
end
end
end
diff --git a/elasticsearch-transport/Gemfile b/elasticsearch-transport/Gemfile
index 61bf628..56e0bac 100644
--- a/elasticsearch-transport/Gemfile
+++ b/elasticsearch-transport/Gemfile
@@ -3,14 +3,14 @@ source 'https://rubygems.org'
# Specify your gem's dependencies in elasticsearch-transport.gemspec
gemspec
-if File.exists? File.expand_path("../../elasticsearch-api/elasticsearch-api.gemspec", __FILE__)
+if File.exist? File.expand_path("../../elasticsearch-api/elasticsearch-api.gemspec", __FILE__)
gem 'elasticsearch-api', :path => File.expand_path("../../elasticsearch-api", __FILE__), :require => false
end
-if File.exists? File.expand_path("../../elasticsearch-extensions", __FILE__)
+if File.exist? File.expand_path("../../elasticsearch-extensions", __FILE__)
gem 'elasticsearch-extensions', :path => File.expand_path("../../elasticsearch-extensions", __FILE__), :require => false
end
-if File.exists? File.expand_path("../../elasticsearch/elasticsearch.gemspec", __FILE__)
+if File.exist? File.expand_path("../../elasticsearch/elasticsearch.gemspec", __FILE__)
gem 'elasticsearch', :path => File.expand_path("../../elasticsearch", __FILE__), :require => false
end
diff --git a/elasticsearch-transport/README.md b/elasticsearch-transport/README.md
index c19099d..7f8b9f8 100644
--- a/elasticsearch-transport/README.md
+++ b/elasticsearch-transport/README.md
@@ -90,6 +90,9 @@ Instead of Strings, you can pass host information as an array of Hashes:
Elasticsearch::Client.new hosts: [ { host: 'myhost1', port: 8080 }, { host: 'myhost2', port: 8080 } ]
+**NOTE:** When specifying multiple hosts, you probably want to enable the `retry_on_failure` option to
+ perform a failed request on another node (see the _Retrying on Failures_ chapter).
+
Common URL parts -- scheme, HTTP authentication credentials, URL prefixes, etc -- are handled automatically:
Elasticsearch::Client.new url: 'https://username:password@api.server.org:4430/search'
@@ -208,6 +211,14 @@ The reloading will timeout if not finished under 1 second by default. To change
Elasticsearch::Client.new hosts: ['localhost:9200', 'localhost:9201'], sniffer_timeout: 3
+**NOTE:** When using reloading hosts ("sniffing") together with authentication, just pass the scheme,
+ user and password with the host info -- or, for more clarity, in the `http` options:
+
+ Elasticsearch::Client.new host: 'localhost:9200',
+ http: { scheme: 'https', user: 'U', password: 'P' },
+ reload_connections: true,
+ reload_on_failure: true
+
### Connection Selector
By default, the client will rotate the connections in a round-robin fashion, using the
@@ -268,24 +279,6 @@ To use a specific adapter for _Faraday_, pass it as the `adapter` argument:
client.transport.connections.first.connection.builder.handlers
# => [Faraday::Adapter::NetHttpPersistent]
-To configure the _Faraday_ instance, pass a configuration block to the transport constructor:
-
- require 'typhoeus'
- require 'typhoeus/adapters/faraday'
-
- transport_configuration = lambda do |f|
- f.response :logger
- f.adapter :typhoeus
- end
-
- transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new \
- hosts: [ { host: 'localhost', port: '9200' } ],
- &transport_configuration
-
- # Pass the transport to the client
- #
- client = Elasticsearch::Client.new transport: transport
-
To pass options to the
[`Faraday::Connection`](https://github.com/lostisland/faraday/blob/master/lib/faraday/connection.rb)
constructor, use the `transport_options` key:
@@ -297,23 +290,45 @@ constructor, use the `transport_options` key:
ssl: { verify: false }
}
-You can also use a bundled [_Curb_](https://rubygems.org/gems/curb) based transport implementation:
+To configure the _Faraday_ instance directly, use a block:
- require 'curb'
- require 'elasticsearch/transport/transport/http/curb'
+ require 'typhoeus'
+ require 'typhoeus/adapters/faraday'
- client = Elasticsearch::Client.new transport_class: Elasticsearch::Transport::Transport::HTTP::Curb
+ client = Elasticsearch::Client.new(host: 'localhost', port: '9200') do |f|
+ f.response :logger
+ f.adapter :typhoeus
+ end
- client.transport.connections.first.connection
- # => #<Curl::Easy http://localhost:9200/>
+You can use any standard Faraday middleware and plugins in the configuration block,
+for example sign the requests for the [AWS Elasticsearch service](https://aws.amazon.com/elasticsearch-service/):
-It's possible to customize the _Curb_ instance by passing a block to the constructor as well
-(in this case, as an inline block):
+ require 'faraday_middleware/aws_signers_v4'
- transport = Elasticsearch::Transport::Transport::HTTP::Curb.new \
+ client = Elasticsearch::Client.new url: 'https://search-my-cluster-abc123....es.amazonaws.com' do |f|
+ f.request :aws_signers_v4,
+ credentials: Aws::Credentials.new(ENV['AWS_ACCESS_KEY'], ENV['AWS_SECRET_ACCESS_KEY']),
+ service_name: 'es',
+ region: 'us-east-1'
+ end
+
+You can also initialize the transport class yourself, and pass it to the client constructor
+as the `transport` argument:
+
+ require 'typhoeus'
+ require 'typhoeus/adapters/faraday'
+
+ transport_configuration = lambda do |f|
+ f.response :logger
+ f.adapter :typhoeus
+ end
+
+ transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new \
hosts: [ { host: 'localhost', port: '9200' } ],
- & lambda { |c| c.verbose = true }
+ &transport_configuration
+ # Pass the transport to the client
+ #
client = Elasticsearch::Client.new transport: transport
Instead of passing the transport to the constructor, you can inject it at run time:
@@ -342,6 +357,25 @@ Instead of passing the transport to the constructor, you can inject it at run ti
#
client.transport = faraday_client
+You can also use a bundled [_Curb_](https://rubygems.org/gems/curb) based transport implementation:
+
+ require 'curb'
+ require 'elasticsearch/transport/transport/http/curb'
+
+ client = Elasticsearch::Client.new transport_class: Elasticsearch::Transport::Transport::HTTP::Curb
+
+ client.transport.connections.first.connection
+ # => #<Curl::Easy http://localhost:9200/>
+
+It's possible to customize the _Curb_ instance by passing a block to the constructor as well
+(in this case, as an inline block):
+
+ transport = Elasticsearch::Transport::Transport::HTTP::Curb.new \
+ hosts: [ { host: 'localhost', port: '9200' } ],
+ & lambda { |c| c.verbose = true }
+
+ client = Elasticsearch::Client.new transport: transport
+
You can write your own transport implementation easily, by including the
{Elasticsearch::Transport::Transport::Base} module, implementing the required contract,
and passing it to the client as the `transport_class` parameter -- or injecting it directly.
diff --git a/elasticsearch-transport/elasticsearch-transport.gemspec b/elasticsearch-transport/elasticsearch-transport.gemspec
index 77c4500..d92478a 100644
--- a/elasticsearch-transport/elasticsearch-transport.gemspec
+++ b/elasticsearch-transport/elasticsearch-transport.gemspec
@@ -28,7 +28,12 @@ Gem::Specification.new do |s|
end
s.add_development_dependency "bundler", "> 1"
- s.add_development_dependency "rake"
+
+ if defined?(RUBY_VERSION) && RUBY_VERSION > '1.9'
+ s.add_development_dependency "rake", "~> 11.1"
+ else
+ s.add_development_dependency "rake", "< 11.0"
+ end
if defined?(RUBY_VERSION) && RUBY_VERSION > '1.9'
s.add_development_dependency "elasticsearch-extensions"
@@ -46,7 +51,8 @@ Gem::Specification.new do |s|
s.add_development_dependency "curb" unless defined? JRUBY_VERSION
s.add_development_dependency "patron" unless defined? JRUBY_VERSION
s.add_development_dependency "typhoeus", '~> 0.6'
- s.add_development_dependency "manticore", '~> 0.3.5' if defined? JRUBY_VERSION
+ s.add_development_dependency "net-http-persistent"
+ s.add_development_dependency "manticore", '~> 0.5.2' if defined? JRUBY_VERSION
s.add_development_dependency "hashie"
# Prevent unit test failures on Ruby 1.8
diff --git a/elasticsearch-transport/lib/elasticsearch/transport.rb b/elasticsearch-transport/lib/elasticsearch/transport.rb
index 5e5de9e..012671d 100644
--- a/elasticsearch-transport/lib/elasticsearch/transport.rb
+++ b/elasticsearch-transport/lib/elasticsearch/transport.rb
@@ -22,8 +22,8 @@ module Elasticsearch
# A convenience wrapper for {::Elasticsearch::Transport::Client#initialize}.
#
- def new(arguments={})
- Elasticsearch::Transport::Client.new(arguments)
+ def new(arguments={}, &block)
+ Elasticsearch::Transport::Client.new(arguments, &block)
end
extend self
end
diff --git a/elasticsearch-transport/lib/elasticsearch/transport/client.rb b/elasticsearch-transport/lib/elasticsearch/transport/client.rb
index 0f820d8..2aa9134 100644
--- a/elasticsearch-transport/lib/elasticsearch/transport/client.rb
+++ b/elasticsearch-transport/lib/elasticsearch/transport/client.rb
@@ -56,6 +56,7 @@ module Elasticsearch
#
# @option arguments [Boolean,Number] :retry_on_failure Retry X times when request fails before raising and
# exception (false by default)
+ # @option arguments [Array<Number>] :retry_on_status Retry when specific status codes are returned
#
# @option arguments [Boolean] :reload_on_failure Reload connections after failure (false by default)
#
@@ -79,34 +80,42 @@ module Elasticsearch
# @option arguments [String] :send_get_body_as Specify the HTTP method to use for GET requests with a body.
# (Default: GET)
#
- def initialize(arguments={})
- hosts = arguments[:hosts] || \
- arguments[:host] || \
- arguments[:url] || \
- arguments[:urls] || \
+ # @yield [faraday] Access and configure the `Faraday::Connection` instance directly with a block
+ #
+ def initialize(arguments={}, &block)
+ @arguments = arguments
+
+ hosts = @arguments[:hosts] || \
+ @arguments[:host] || \
+ @arguments[:url] || \
+ @arguments[:urls] || \
ENV.fetch('ELASTICSEARCH_URL', 'localhost:9200')
- arguments[:logger] ||= arguments[:log] ? DEFAULT_LOGGER.call() : nil
- arguments[:tracer] ||= arguments[:trace] ? DEFAULT_TRACER.call() : nil
- arguments[:reload_connections] ||= false
- arguments[:retry_on_failure] ||= false
- arguments[:reload_on_failure] ||= false
- arguments[:randomize_hosts] ||= false
- arguments[:transport_options] ||= {}
+ @arguments[:logger] ||= @arguments[:log] ? DEFAULT_LOGGER.call() : nil
+ @arguments[:tracer] ||= @arguments[:trace] ? DEFAULT_TRACER.call() : nil
+ @arguments[:reload_connections] ||= false
+ @arguments[:retry_on_failure] ||= false
+ @arguments[:reload_on_failure] ||= false
+ @arguments[:randomize_hosts] ||= false
+ @arguments[:transport_options] ||= {}
+ @arguments[:http] ||= {}
- arguments[:transport_options].update(:request => { :timeout => arguments[:request_timeout] } ) if arguments[:request_timeout]
+ @arguments[:transport_options].update(:request => { :timeout => @arguments[:request_timeout] } ) if @arguments[:request_timeout]
- @send_get_body_as = arguments[:send_get_body_as] || 'GET'
+ @send_get_body_as = @arguments[:send_get_body_as] || 'GET'
- transport_class = arguments[:transport_class] || DEFAULT_TRANSPORT_CLASS
+ transport_class = @arguments[:transport_class] || DEFAULT_TRANSPORT_CLASS
- @transport = arguments[:transport] || begin
+ @transport = @arguments[:transport] || begin
if transport_class == Transport::HTTP::Faraday
- transport_class.new(:hosts => __extract_hosts(hosts, arguments), :options => arguments) do |faraday|
- faraday.adapter(arguments[:adapter] || __auto_detect_adapter)
+ transport_class.new(:hosts => __extract_hosts(hosts, @arguments), :options => @arguments) do |faraday|
+ block.call faraday if block
+ unless (h = faraday.builder.handlers.last) && h.name.start_with?("Faraday::Adapter")
+ faraday.adapter(@arguments[:adapter] || __auto_detect_adapter)
+ end
end
else
- transport_class.new(:hosts => __extract_hosts(hosts, arguments), :options => arguments)
+ transport_class.new(:hosts => __extract_hosts(hosts, @arguments), :options => @arguments)
end
end
end
@@ -143,22 +152,33 @@ module Elasticsearch
end
result = hosts.map do |host|
- case host
- when String
- if host =~ /^[a-z]+\:\/\//
- uri = URI.parse(host)
- { :scheme => uri.scheme, :user => uri.user, :password => uri.password, :host => uri.host, :path => uri.path, :port => uri.port.to_s }
+ host_parts = case host
+ when String
+ if host =~ /^[a-z]+\:\/\//
+ uri = URI.parse(host)
+ { :scheme => uri.scheme, :user => uri.user, :password => uri.password, :host => uri.host, :path => uri.path, :port => uri.port }
+ else
+ host, port = host.split(':')
+ { :host => host, :port => port }
+ end
+ when URI
+ { :scheme => host.scheme, :user => host.user, :password => host.password, :host => host.host, :path => host.path, :port => host.port }
+ when Hash
+ host
else
- host, port = host.split(':')
- { :host => host, :port => port }
+ raise ArgumentError, "Please pass host as a String, URI or Hash -- #{host.class} given."
end
- when URI
- { :scheme => host.scheme, :user => host.user, :password => host.password, :host => host.host, :path => host.path, :port => host.port.to_s }
- when Hash
- host
- else
- raise ArgumentError, "Please pass host as a String, URI or Hash -- #{host.class} given."
+
+ host_parts[:port] = host_parts[:port].to_i unless host_parts[:port].nil?
+
+ # Transfer the selected host parts such as authentication credentials to `options`,
+ # so we can re-use them when reloading connections
+ #
+ host_parts.select { |k,v| [:scheme, :port, :user, :password].include?(k) }.each do |k,v|
+ @arguments[:http][k] ||= v
end
+
+ host_parts
end
result.shuffle! if options[:randomize_hosts]
diff --git a/elasticsearch-transport/lib/elasticsearch/transport/transport/base.rb b/elasticsearch-transport/lib/elasticsearch/transport/transport/base.rb
index 5f478f4..68b6a1f 100644
--- a/elasticsearch-transport/lib/elasticsearch/transport/transport/base.rb
+++ b/elasticsearch-transport/lib/elasticsearch/transport/transport/base.rb
@@ -11,13 +11,14 @@ module Elasticsearch
DEFAULT_RESURRECT_AFTER = 60 # Seconds
DEFAULT_MAX_RETRIES = 3 # Requests
DEFAULT_SERIALIZER_CLASS = Serializer::MultiJson
+ SANITIZED_PASSWORD = '*' * (rand(14)+1)
attr_reader :hosts, :options, :connections, :counter, :last_request_at, :protocol
attr_accessor :serializer, :sniffer, :logger, :tracer,
:reload_connections, :reload_after,
:resurrect_after, :max_retries
- # Creates a new transport object.
+ # Creates a new transport object
#
# @param arguments [Hash] Settings and options for the transport
# @param block [Proc] Lambda or Proc which can be evaluated in the context of the "session" object
@@ -28,8 +29,12 @@ module Elasticsearch
# @see Client#initialize
#
def initialize(arguments={}, &block)
+ @state_mutex = Mutex.new
+
@hosts = arguments[:hosts] || []
@options = arguments[:options] || {}
+ @options[:http] ||= {}
+
@block = block
@connections = __build_connections
@@ -41,11 +46,13 @@ module Elasticsearch
@sniffer = options[:sniffer_class] ? options[:sniffer_class].new(self) : Sniffer.new(self)
@counter = 0
+ @counter_mtx = Mutex.new
@last_request_at = Time.now
@reload_connections = options[:reload_connections]
@reload_after = options[:reload_connections].is_a?(Fixnum) ? options[:reload_connections] : DEFAULT_RELOAD_AFTER
@resurrect_after = options[:resurrect_after] || DEFAULT_RESURRECT_AFTER
@max_retries = options[:retry_on_failure].is_a?(Fixnum) ? options[:retry_on_failure] : DEFAULT_MAX_RETRIES
+ @retry_on_status = Array(options[:retry_on_status]).map { |d| d.to_i }
end
# Returns a connection from the connection pool by delegating to {Connections::Collection#get_connection}.
@@ -59,14 +66,12 @@ module Elasticsearch
def get_connection(options={})
resurrect_dead_connections! if Time.now > @last_request_at + @resurrect_after
- connection = connections.get_connection(options)
- @counter += 1
-
+ @counter_mtx.synchronize { @counter += 1 }
reload_connections! if reload_connections && counter % reload_after == 0
- connection
+ connections.get_connection(options)
end
- # Reloads and replaces the connection collection based on cluster information.
+ # Reloads and replaces the connection collection based on cluster information
#
# @see Sniffer#hosts
#
@@ -79,7 +84,7 @@ module Elasticsearch
self
end
- # Tries to "resurrect" all eligible dead connections.
+ # Tries to "resurrect" all eligible dead connections
#
# @see Connections::Connection#resurrect!
#
@@ -87,37 +92,97 @@ module Elasticsearch
connections.dead.each { |c| c.resurrect! }
end
- # Replaces the connections collection.
+ # Rebuilds the connections collection in the transport.
+ #
+ # The method *adds* new connections from the passed hosts to the collection,
+ # and *removes* all connections not contained in the passed hosts.
#
+ # @return [Connections::Collection]
# @api private
#
def __rebuild_connections(arguments={})
- @hosts = arguments[:hosts] || []
- @options = arguments[:options] || {}
- @connections = __build_connections
+ @state_mutex.synchronize do
+ @hosts = arguments[:hosts] || []
+ @options = arguments[:options] || {}
+
+ __close_connections
+
+ new_connections = __build_connections
+ stale_connections = @connections.all.select { |c| ! new_connections.include?(c) }
+ new_connections = new_connections.reject { |c| @connections.include?(c) }
+
+ @connections.remove(stale_connections)
+ @connections.add(new_connections)
+ @connections
+ end
+ end
+
+ # Builds and returns a collection of connections
+ #
+ # The adapters have to implement the {Base#__build_connection} method.
+ #
+ # @return [Connections::Collection]
+ # @api private
+ #
+ def __build_connections
+ Connections::Collection.new \
+ :connections => hosts.map { |host|
+ host[:protocol] = host[:scheme] || options[:scheme] || options[:http][:scheme] || DEFAULT_PROTOCOL
+ host[:port] ||= options[:port] || options[:http][:port] || DEFAULT_PORT
+ if (options[:user] || options[:http][:user]) && !host[:user]
+ host[:user] ||= options[:user] || options[:http][:user]
+ host[:password] ||= options[:password] || options[:http][:password]
+ end
+
+ __build_connection(host, (options[:transport_options] || {}), @block)
+ },
+ :selector_class => options[:selector_class],
+ :selector => options[:selector]
+ end
+
+ # @abstract Build and return a connection.
+ # A transport implementation *must* implement this method.
+ # See {HTTP::Faraday#__build_connection} for an example.
+ #
+ # @return [Connections::Connection]
+ # @api private
+ #
+ def __build_connection(host, options={}, block=nil)
+ raise NoMethodError, "Implement this method in your class"
end
- # Log request and response information.
+ # Closes the connections collection
+ #
+ # @api private
+ #
+ def __close_connections
+ # A hook point for specific adapters when they need to close connections
+ end
+
+ # Log request and response information
#
# @api private
#
def __log(method, path, params, body, url, response, json, took, duration)
- logger.info "#{method.to_s.upcase} #{url} " +
+ sanitized_url = url.to_s.gsub(/\/\/(.+):(.+)@/, '//' + '\1:' + SANITIZED_PASSWORD + '@')
+ logger.info "#{method.to_s.upcase} #{sanitized_url} " +
"[status:#{response.status}, request:#{sprintf('%.3fs', duration)}, query:#{took}]"
logger.debug "> #{__convert_to_json(body)}" if body
logger.debug "< #{response.body}"
end
- # Log failed request.
+ # Log failed request
#
# @api private
+ #
def __log_failed(response)
logger.fatal "[#{response.status}] #{response.body}"
end
- # Trace the request in the `curl` format.
+ # Trace the request in the `curl` format
#
# @api private
+ #
def __trace(method, path, params, body, url, response, json, took, duration)
trace_url = "http://localhost:9200/#{path}?pretty" +
( params.empty? ? '' : "&#{::Faraday::Utils::ParamsHash[params].to_query}" )
@@ -130,6 +195,7 @@ module Elasticsearch
# Raise error specific for the HTTP response status or a generic server error
#
# @api private
+ #
def __raise_transport_error(response)
error = ERRORS[response.status] || ServerError
raise error.new "[#{response.status}] #{response.body}"
@@ -138,6 +204,7 @@ module Elasticsearch
# Converts any non-String object to JSON
#
# @api private
+ #
def __convert_to_json(o=nil, options={})
o = o.is_a?(String) ? o : serializer.dump(o, options)
end
@@ -149,7 +216,7 @@ module Elasticsearch
# @api private
def __full_url(host)
url = "#{host[:protocol]}://"
- url += "#{host[:user]}:#{host[:password]}@" if host[:user]
+ url += "#{CGI.escape(host[:user])}:#{CGI.escape(host[:password])}@" if host[:user]
url += "#{host[:host]}:#{host[:port]}"
url += "#{host[:path]}" if host[:path]
url
@@ -191,6 +258,23 @@ module Elasticsearch
connection.healthy! if connection.failures > 0
+ # Raise an exception so we can catch it for `retry_on_status`
+ __raise_transport_error(response) if response.status.to_i >= 300 && @retry_on_status.include?(response.status.to_i)
+
+ rescue Elasticsearch::Transport::Transport::ServerError => e
+ if @retry_on_status.include?(response.status)
+ logger.warn "[#{e.class}] Attempt #{tries} to get response from #{url}" if logger
+ logger.debug "[#{e.class}] Attempt #{tries} to get response from #{url}" if logger
+ if tries <= max_retries
+ retry
+ else
+ logger.fatal "[#{e.class}] Cannot get response from #{url} after #{tries} tries" if logger
+ raise e
+ end
+ else
+ raise e
+ end
+
rescue *host_unreachable_exceptions => e
logger.error "[#{e.class}] #{e.message} #{connection.host.inspect}" if logger
@@ -216,7 +300,8 @@ module Elasticsearch
rescue Exception => e
logger.fatal "[#{e.class}] #{e.message} (#{connection.host.inspect if connection})" if logger
raise e
- end
+
+ end #/begin
duration = Time.now-start if logger || tracer
@@ -227,7 +312,7 @@ module Elasticsearch
__raise_transport_error response
end
- json = serializer.load(response.body) if response.headers && response.headers["content-type"] =~ /json/
+ json = serializer.load(response.body) if response.body && !response.body.empty? && response.headers && response.headers["content-type"] =~ /json/
took = (json['took'] ? sprintf('%.3fs', json['took']/1000.0) : 'n/a') rescue 'n/a' if logger || tracer
__log method, path, params, body, url, response, json, took, duration if logger
@@ -246,15 +331,6 @@ module Elasticsearch
def host_unreachable_exceptions
[Errno::ECONNREFUSED]
end
-
- # @abstract A transport implementation must implement this method.
- # See {HTTP::Faraday#__build_connections} for an example.
- #
- # @return [Connections::Collection]
- # @api private
- def __build_connections
- raise NoMethodError, "Implement this method in your class"
- end
end
end
end
diff --git a/elasticsearch-transport/lib/elasticsearch/transport/transport/connections/collection.rb b/elasticsearch-transport/lib/elasticsearch/transport/transport/connections/collection.rb
index 136df4b..5edc43e 100644
--- a/elasticsearch-transport/lib/elasticsearch/transport/transport/connections/collection.rb
+++ b/elasticsearch-transport/lib/elasticsearch/transport/transport/connections/collection.rb
@@ -85,6 +85,26 @@ module Elasticsearch
def size
connections.size
end
+
+ # Add connection(s) to the collection
+ #
+ # @param connections [Connection,Array] A connection or an array of connections to add
+ # @return [self]
+ #
+ def add(connections)
+ @connections += Array(connections).to_a
+ self
+ end
+
+ # Remove connection(s) from the collection
+ #
+ # @param connections [Connection,Array] A connection or an array of connections to remove
+ # @return [self]
+ #
+ def remove(connections)
+ @connections -= Array(connections).to_a
+ self
+ end
end
end
diff --git a/elasticsearch-transport/lib/elasticsearch/transport/transport/connections/connection.rb b/elasticsearch-transport/lib/elasticsearch/transport/transport/connections/connection.rb
index 0efd0b3..4c00a8f 100644
--- a/elasticsearch-transport/lib/elasticsearch/transport/transport/connections/connection.rb
+++ b/elasticsearch-transport/lib/elasticsearch/transport/transport/connections/connection.rb
@@ -26,8 +26,10 @@ module Elasticsearch
@host = arguments[:host]
@connection = arguments[:connection]
@options = arguments[:options] || {}
+ @state_mutex = Mutex.new
@options[:resurrect_timeout] ||= DEFAULT_RESURRECT_TIMEOUT
+ @dead = false
@failures = 0
end
@@ -65,9 +67,11 @@ module Elasticsearch
# @return [self]
#
def dead!
- @dead = true
- @failures += 1
- @dead_since = Time.now
+ @state_mutex.synchronize do
+ @dead = true
+ @failures += 1
+ @dead_since = Time.now
+ end
self
end
@@ -76,7 +80,9 @@ module Elasticsearch
# @return [self]
#
def alive!
- @dead = false
+ @state_mutex.synchronize do
+ @dead = false
+ end
self
end
@@ -85,8 +91,10 @@ module Elasticsearch
# @return [self]
#
def healthy!
- @dead = false
- @failures = 0
+ @state_mutex.synchronize do
+ @dead = false
+ @failures = 0
+ end
self
end
@@ -105,7 +113,19 @@ module Elasticsearch
# @return [Boolean]
#
def resurrectable?
- Time.now > @dead_since + ( @options[:resurrect_timeout] * 2 ** (@failures-1) )
+ @state_mutex.synchronize {
+ Time.now > @dead_since + ( @options[:resurrect_timeout] * 2 ** (@failures-1) )
+ }
+ end
+
+ # Equality operator based on connection protocol, host and port
+ #
+ # @return [Boolean]
+ #
+ def ==(other)
+ self.host[:protocol] == other.host[:protocol] && \
+ self.host[:host] == other.host[:host] && \
+ self.host[:port].to_i == other.host[:port].to_i
end
# @return [String]
diff --git a/elasticsearch-transport/lib/elasticsearch/transport/transport/connections/selector.rb b/elasticsearch-transport/lib/elasticsearch/transport/transport/connections/selector.rb
index 3fec180..cd1e980 100644
--- a/elasticsearch-transport/lib/elasticsearch/transport/transport/connections/selector.rb
+++ b/elasticsearch-transport/lib/elasticsearch/transport/transport/connections/selector.rb
@@ -50,7 +50,7 @@ module Elasticsearch
#
def select(options={})
# On Ruby 1.9, Array#rotate could be used instead
- @current = @current.nil? ? 0 : @current+1
+ @current = !defined?(@current) || @current.nil? ? 0 : @current+1
@current = 0 if @current >= connections.size
connections[@current]
end
diff --git a/elasticsearch-transport/lib/elasticsearch/transport/transport/http/curb.rb b/elasticsearch-transport/lib/elasticsearch/transport/transport/http/curb.rb
index c72a210..e66acac 100644
--- a/elasticsearch-transport/lib/elasticsearch/transport/transport/http/curb.rb
+++ b/elasticsearch-transport/lib/elasticsearch/transport/transport/http/curb.rb
@@ -37,32 +37,24 @@ module Elasticsearch
end
end
- # Builds and returns a collection of connections.
+ # Builds and returns a connection
#
- # @return [Connections::Collection]
+ # @return [Connections::Connection]
#
- def __build_connections
- Connections::Collection.new \
- :connections => hosts.map { |host|
- host[:protocol] = host[:scheme] || DEFAULT_PROTOCOL
- host[:port] ||= DEFAULT_PORT
+ def __build_connection(host, options={}, block=nil)
+ client = ::Curl::Easy.new
+ client.headers = {'User-Agent' => "Curb #{Curl::CURB_VERSION}"}
+ client.url = __full_url(host)
- client = ::Curl::Easy.new
- client.headers = {'User-Agent' => "Curb #{Curl::CURB_VERSION}"}
- client.url = __full_url(host)
-
- if host[:user]
- client.http_auth_types = host[:auth_type] || :basic
- client.username = host[:user]
- client.password = host[:password]
- end
+ if host[:user]
+ client.http_auth_types = host[:auth_type] || :basic
+ client.username = host[:user]
+ client.password = host[:password]
+ end
- client.instance_eval &@block if @block
+ client.instance_eval(&block) if block
- Connections::Connection.new :host => host, :connection => client
- },
- :selector_class => options[:selector_class],
- :selector => options[:selector]
+ Connections::Connection.new :host => host, :connection => client
end
# Returns an array of implementation specific connection errors.
diff --git a/elasticsearch-transport/lib/elasticsearch/transport/transport/http/faraday.rb b/elasticsearch-transport/lib/elasticsearch/transport/transport/http/faraday.rb
index 04800a2..3048925 100644
--- a/elasticsearch-transport/lib/elasticsearch/transport/transport/http/faraday.rb
+++ b/elasticsearch-transport/lib/elasticsearch/transport/transport/http/faraday.rb
@@ -27,23 +27,13 @@ module Elasticsearch
end
end
- # Builds and returns a collection of connections.
+ # Builds and returns a connection
#
- # @return [Connections::Collection]
+ # @return [Connections::Connection]
#
- def __build_connections
- Connections::Collection.new \
- :connections => hosts.map { |host|
- host[:protocol] = host[:scheme] || DEFAULT_PROTOCOL
- host[:port] ||= DEFAULT_PORT
- url = __full_url(host)
-
- Connections::Connection.new \
- :host => host,
- :connection => ::Faraday::Connection.new(url, (options[:transport_options] || {}), &@block )
- },
- :selector_class => options[:selector_class],
- :selector => options[:selector]
+ def __build_connection(host, options={}, block=nil)
+ client = ::Faraday::Connection.new(__full_url(host), options, &block)
+ Connections::Connection.new :host => host, :connection => client
end
# Returns an array of implementation specific connection errors.
diff --git a/elasticsearch-transport/lib/elasticsearch/transport/transport/http/manticore.rb b/elasticsearch-transport/lib/elasticsearch/transport/transport/http/manticore.rb
index 32314c7..43607a1 100644
--- a/elasticsearch-transport/lib/elasticsearch/transport/transport/http/manticore.rb
+++ b/elasticsearch-transport/lib/elasticsearch/transport/transport/http/manticore.rb
@@ -45,6 +45,19 @@ module Elasticsearch
class Manticore
include Base
+ def initialize(arguments={}, &block)
+ @manticore = build_client(arguments[:options] || {})
+ super(arguments, &block)
+ end
+
+ # Should just be run once at startup
+ def build_client(options={})
+ client_options = options[:transport_options] || {}
+ client_options[:ssl] = options[:ssl] || {}
+
+ @manticore = ::Manticore::Client.new(client_options)
+ end
+
# Performs the request by invoking {Transport::Base#perform_request} with a block.
#
# @return [Response]
@@ -84,9 +97,6 @@ module Elasticsearch
@request_options[:headers] = options[:headers]
end
- client_options = options[:transport_options] || {}
- client_options[:ssl] = options[:ssl] || {}
-
Connections::Collection.new \
:connections => hosts.map { |host|
host[:protocol] = host[:scheme] || DEFAULT_PROTOCOL
@@ -95,16 +105,24 @@ module Elasticsearch
host.delete(:user) # auth is not supported here.
host.delete(:password) # use the headers
- url = __full_url(host)
-
Connections::Connection.new \
:host => host,
- :connection => ::Manticore::Client.new(client_options)
+ :connection => @manticore
},
:selector_class => options[:selector_class],
:selector => options[:selector]
end
+ # A no-op for this adapter: a single long-lived Manticore::Client
+ # instance is shared by all connections and re-used across reloads,
+ # so there are no per-connection resources to close
+ #
+ #
+ def __close_connections
+ # The Manticore adapter uses a single long-lived instance
+ # of Manticore::Client, so we don't close the connections.
+ end
+
# Returns an array of implementation specific connection errors.
#
# @return [Array]
diff --git a/elasticsearch-transport/lib/elasticsearch/transport/transport/sniffer.rb b/elasticsearch-transport/lib/elasticsearch/transport/transport/sniffer.rb
index d202b30..a18709f 100644
--- a/elasticsearch-transport/lib/elasticsearch/transport/transport/sniffer.rb
+++ b/elasticsearch-transport/lib/elasticsearch/transport/transport/sniffer.rb
@@ -2,15 +2,16 @@ module Elasticsearch
module Transport
module Transport
- # Handles node discovery ("sniffing").
+ # Handles node discovery ("sniffing")
#
class Sniffer
- RE_URL = /\/([^:]*):([0-9]+)\]/ # Use named groups on Ruby 1.9: /\/(?<host>[^:]*):(?<port>[0-9]+)\]/
+ ES1_RE_URL = /\[([^\/]*)?\/?([^:]*):([0-9]+)\]/
+ ES2_RE_URL = /([^\/]*)?\/?([^:]*):([0-9]+)/
attr_reader :transport
attr_accessor :timeout
- # @param transport [Object] A transport instance.
+ # @param transport [Object] A transport instance
#
def initialize(transport)
@transport = transport
@@ -30,9 +31,12 @@ module Elasticsearch
Timeout::timeout(timeout, SnifferTimeoutError) do
nodes = transport.perform_request('GET', '_nodes/http').body
hosts = nodes['nodes'].map do |id,info|
- if matches = info["#{transport.protocol}_address"].to_s.match(RE_URL)
- # TODO: Implement lightweight "indifferent access" here
- info.merge :host => matches[1], :port => matches[2], :id => id
+ addr_str = info["#{transport.protocol}_address"].to_s
+ matches = addr_str.match(ES1_RE_URL) || addr_str.match(ES2_RE_URL)
+ if matches
+ host = matches[1].empty? ? matches[2] : matches[1]
+ port = matches[3]
+ info.merge :host => host, :port => port, :id => id
end
end.compact
diff --git a/elasticsearch-transport/lib/elasticsearch/transport/version.rb b/elasticsearch-transport/lib/elasticsearch/transport/version.rb
index a68367e..b9b82e7 100644
--- a/elasticsearch-transport/lib/elasticsearch/transport/version.rb
+++ b/elasticsearch-transport/lib/elasticsearch/transport/version.rb
@@ -1,5 +1,5 @@
module Elasticsearch
module Transport
- VERSION = "1.0.12"
+ VERSION = "2.0.0"
end
end
diff --git a/elasticsearch-transport/test/integration/client_test.rb b/elasticsearch-transport/test/integration/client_test.rb
index fbbe255..4fdccfd 100644
--- a/elasticsearch-transport/test/integration/client_test.rb
+++ b/elasticsearch-transport/test/integration/client_test.rb
@@ -5,10 +5,14 @@ class Elasticsearch::Transport::ClientIntegrationTest < Elasticsearch::Test::Int
Elasticsearch::Extensions::Test::Cluster.start(nodes: 2) if ENV['SERVER'] and not Elasticsearch::Extensions::Test::Cluster.running?
end
+ shutdown do
+ Elasticsearch::Extensions::Test::Cluster.stop if ENV['SERVER'] and Elasticsearch::Extensions::Test::Cluster.running?
+ end
+
context "Elasticsearch client" do
teardown do
- begin; Object.send(:remove_const, :Typhoeus); rescue NameError; end
- begin; Object.send(:remove_const, :Patron); rescue NameError; end
+ begin; Object.send(:remove_const, :Typhoeus); rescue NameError; end
+ begin; Net::HTTP.send(:remove_const, :Persistent); rescue NameError; end
end
setup do
@@ -59,6 +63,20 @@ class Elasticsearch::Transport::ClientIntegrationTest < Elasticsearch::Test::Int
assert_match /---\ncluster_name:/, response.body.to_s
end
+ should "pass options to the Faraday::Connection with a block" do
+ @client = Elasticsearch::Client.new(
+ host: "localhost:#{@port}",
+ logger: (ENV['QUIET'] ? nil : @logger)
+ ) do |client|
+ client.headers['Content-Type'] = 'application/yaml'
+ end
+
+ response = @client.perform_request 'GET', '_cluster/health'
+
+ assert response.body.start_with?("---\n"), "Response body should be YAML: #{response.body.inspect}"
+ assert_equal 'application/yaml', response.headers['content-type']
+ end
+
context "with round robin selector" do
setup do
@client = Elasticsearch::Client.new \
@@ -131,11 +149,69 @@ class Elasticsearch::Transport::ClientIntegrationTest < Elasticsearch::Test::Int
end
end
+ context "with retrying on status" do
+ should "retry when the status does match" do
+ @client = Elasticsearch::Client.new \
+ hosts: ["localhost:#{@port}"],
+ logger: (ENV['QUIET'] ? nil : @logger),
+ retry_on_status: 400
+
+ @client.transport.logger
+ .expects(:debug)
+ .with( regexp_matches(/Attempt \d to get response/) )
+ .times(4)
+
+ assert_raise Elasticsearch::Transport::Transport::Errors::BadRequest do
+ @client.perform_request 'GET', '_foobar'
+ end
+ end
+ end
+
+ context "when reloading connections" do
+ should "keep existing connections" do
+ require 'patron' # We need a client with keep-alive
+ client = Elasticsearch::Transport::Client.new host: "localhost:#{@port}", adapter: :patron, logger: @logger
+
+ assert_equal 'Faraday::Adapter::Patron',
+ client.transport.connections.first.connection.builder.handlers.first.name
+
+ response = client.perform_request 'GET', '_nodes/stats/http'
+
+ a = response.body['nodes'].values.select { |n| n['name'] == 'node-1' }.first['http']['total_opened']
+
+ client.transport.reload_connections!
+
+ response = client.perform_request 'GET', '_nodes/stats/http'
+ b = response.body['nodes'].values.select { |n| n['name'] == 'node-1' }.first['http']['total_opened']
+
+ assert_equal a, b
+ end unless JRUBY
+ end
+
context "with Faraday adapters" do
+ should "set the adapter with a block" do
+ require 'net/http/persistent'
+
+ client = Elasticsearch::Transport::Client.new url: "localhost:#{@port}" do |f|
+ f.adapter :net_http_persistent
+ end
+
+ assert_equal 'Faraday::Adapter::NetHttpPersistent',
+ client.transport.connections.first.connection.builder.handlers.first.name
+
+ response = @client.perform_request 'GET', '_cluster/health'
+ assert_equal 200, response.status
+ end
+
should "automatically use the Patron client when loaded" do
+ teardown { begin; Object.send(:remove_const, :Patron); rescue NameError; end }
+
require 'patron'
client = Elasticsearch::Transport::Client.new host: "localhost:#{@port}"
+ assert_equal 'Faraday::Adapter::Patron',
+ client.transport.connections.first.connection.builder.handlers.first.name
+
response = @client.perform_request 'GET', '_cluster/health'
assert_equal 200, response.status
end unless JRUBY
diff --git a/elasticsearch-transport/test/integration/transport_test.rb b/elasticsearch-transport/test/integration/transport_test.rb
index e90462c..cbbef03 100644
--- a/elasticsearch-transport/test/integration/transport_test.rb
+++ b/elasticsearch-transport/test/integration/transport_test.rb
@@ -5,6 +5,10 @@ class Elasticsearch::Transport::ClientIntegrationTest < Elasticsearch::Test::Int
Elasticsearch::Extensions::Test::Cluster.start(nodes: 2) if ENV['SERVER'] and not Elasticsearch::Extensions::Test::Cluster.running?
end
+ shutdown do
+ Elasticsearch::Extensions::Test::Cluster.stop if ENV['SERVER'] and Elasticsearch::Extensions::Test::Cluster.running?
+ end
+
context "Transport" do
setup do
@port = (ENV['TEST_CLUSTER_PORT'] || 9250).to_i
diff --git a/elasticsearch-transport/test/unit/client_test.rb b/elasticsearch-transport/test/unit/client_test.rb
index c25e7ff..1baa1a2 100644
--- a/elasticsearch-transport/test/unit/client_test.rb
+++ b/elasticsearch-transport/test/unit/client_test.rb
@@ -127,6 +127,17 @@ class Elasticsearch::Transport::ClientTest < Test::Unit::TestCase
hosts = @client.__extract_hosts( { :host => 'myhost', :scheme => 'https' } )
assert_equal 'myhost', hosts[0][:host]
assert_equal 'https', hosts[0][:scheme]
+ assert_nil hosts[0][:port]
+ end
+
+ should "extract from hash with a port passed as a string" do
+ hosts = @client.__extract_hosts( { :host => 'myhost', :scheme => 'https', :port => '443' } )
+ assert_equal 443, hosts[0][:port]
+ end
+
+ should "extract from hash with a port passed as an integer" do
+ hosts = @client.__extract_hosts( { :host => 'myhost', :scheme => 'https', :port => 443 } )
+ assert_equal 443, hosts[0][:port]
end
should "extract from Hashie::Mash" do
@@ -154,10 +165,10 @@ class Elasticsearch::Transport::ClientTest < Test::Unit::TestCase
assert_equal 2, hosts.size
assert_equal 'host1', hosts[0][:host]
- assert_equal '1000', hosts[0][:port]
+ assert_equal 1000, hosts[0][:port]
assert_equal 'host2', hosts[1][:host]
- assert_equal '2000', hosts[1][:port]
+ assert_equal 2000, hosts[1][:port]
end
should "extract path" do
@@ -171,7 +182,7 @@ class Elasticsearch::Transport::ClientTest < Test::Unit::TestCase
assert_equal 'https', hosts[0][:scheme]
assert_equal 'myhost', hosts[0][:host]
- assert_equal '8080', hosts[0][:port]
+ assert_equal 8080, hosts[0][:port]
end
should "extract credentials" do
@@ -181,14 +192,14 @@ class Elasticsearch::Transport::ClientTest < Test::Unit::TestCase
assert_equal 'USERNAME', hosts[0][:user]
assert_equal 'PASSWORD', hosts[0][:password]
assert_equal 'myhost', hosts[0][:host]
- assert_equal '8080', hosts[0][:port]
+ assert_equal 8080, hosts[0][:port]
end
should "pass hashes over" do
hosts = @client.__extract_hosts [{:host => 'myhost', :port => '1000', :foo => 'bar'}]
assert_equal 'myhost', hosts[0][:host]
- assert_equal '1000', hosts[0][:port]
+ assert_equal 1000, hosts[0][:port]
assert_equal 'bar', hosts[0][:foo]
end
@@ -200,7 +211,7 @@ class Elasticsearch::Transport::ClientTest < Test::Unit::TestCase
assert_equal 'USERNAME', hosts[0][:user]
assert_equal 'PASSWORD', hosts[0][:password]
assert_equal 'myhost', hosts[0][:host]
- assert_equal '4430', hosts[0][:port]
+ assert_equal 4430, hosts[0][:port]
end
should "split comma-separated URLs" do
@@ -259,5 +270,84 @@ class Elasticsearch::Transport::ClientTest < Test::Unit::TestCase
end unless JRUBY
end
+ context "configuring Faraday" do
+ setup do
+ Elasticsearch::Transport::Client::DEFAULT_TRANSPORT_CLASS.any_instance.unstub(:__build_connections)
+ begin; Object.send(:remove_const, :Typhoeus); rescue NameError; end
+ end
+
+ should "apply faraday adapter" do
+ c = Elasticsearch::Transport::Client.new do |faraday|
+ faraday.adapter :typhoeus
+ end
+ handlers = c.transport.connections.all.first.connection.builder.handlers
+
+ assert_includes handlers, Faraday::Adapter::Typhoeus
+ end
+
+ should "apply faraday response logger" do
+ c = Elasticsearch::Transport::Client.new do |faraday|
+ faraday.response :logger
+ end
+ handlers = c.transport.connections.all.first.connection.builder.handlers
+
+ assert_includes handlers, Faraday::Response::Logger
+ end
+ end
+
+ context "when passed options" do
+ setup do
+ Elasticsearch::Transport::Client::DEFAULT_TRANSPORT_CLASS.any_instance.unstub(:__build_connections)
+ end
+
+ should "configure the HTTP scheme" do
+ c = Elasticsearch::Transport::Client.new \
+ :hosts => ['node1', 'node2'],
+ :port => 1234, :scheme => 'https', :user => 'USERNAME', :password => 'PASSWORD'
+
+ assert_equal 'https://USERNAME:PASSWORD@node1:1234/', c.transport.connections[0].full_url('')
+ assert_equal 'https://USERNAME:PASSWORD@node2:1234/', c.transport.connections[1].full_url('')
+ end
+
+ should "keep the credentials after reloading" do
+ Elasticsearch::Transport::Client::DEFAULT_TRANSPORT_CLASS.any_instance.
+ stubs(:sniffer).
+ returns( mock(:hosts => [ {:host => 'foobar', :port => 4567, :id => 'foobar4567'} ]) )
+
+ c = Elasticsearch::Transport::Client.new \
+ :url => 'http://foo:1234',
+ :user => 'USERNAME', :password => 'PASSWORD'
+
+ assert_equal 'http://USERNAME:PASSWORD@foo:1234/', c.transport.connections.first.full_url('')
+
+ c.transport.reload_connections!
+
+ assert_equal 'http://USERNAME:PASSWORD@foobar:4567/', c.transport.connections.first.full_url('')
+ end
+
+ should "transfer selected host parts into the 'http' options" do
+ c = Elasticsearch::Transport::Client.new \
+ :host => { :scheme => 'https', :port => '8080', :host => 'node1', :user => 'U', :password => 'P' }
+
+ assert_equal 'https://U:P@node1:8080/', c.transport.connections.first.full_url('')
+
+ assert_equal 'https', c.transport.options[:http][:scheme]
+ assert_equal 8080, c.transport.options[:http][:port]
+ assert_equal 'U', c.transport.options[:http][:user]
+ assert_equal 'P', c.transport.options[:http][:password]
+ end
+
+ should "transfer selected host parts from URL into the 'http' options" do
+ c = Elasticsearch::Transport::Client.new :url => 'https://U:P@node1:8080'
+
+ assert_equal 'https://U:P@node1:8080/', c.transport.connections.first.full_url('')
+
+ assert_equal 'https', c.transport.options[:http][:scheme]
+ assert_equal 8080, c.transport.options[:http][:port]
+ assert_equal 'U', c.transport.options[:http][:user]
+ assert_equal 'P', c.transport.options[:http][:password]
+ end
+ end
+
end
end
diff --git a/elasticsearch-transport/test/unit/connection_collection_test.rb b/elasticsearch-transport/test/unit/connection_collection_test.rb
index 1b6d8ca..5be9cb3 100644
--- a/elasticsearch-transport/test/unit/connection_collection_test.rb
+++ b/elasticsearch-transport/test/unit/connection_collection_test.rb
@@ -43,6 +43,48 @@ class Elasticsearch::Transport::Transport::Connections::CollectionTest < Test::U
assert_equal 2, c.size
end
+ should "add connections" do
+ c = Collection.new :connections => [ Connection.new(:host => { :protocol => 'http', :host => 'foo', :port => 1}) ]
+ assert_equal 1, c.size
+
+ c.add([ Connection.new(:host => { :protocol => 'http', :host => 'bar', :port => 1 }),
+ Connection.new(:host => { :protocol => 'http', :host => 'bam', :port => 1 }) ])
+ assert_equal 3, c.size
+ end
+
+ should "add connection" do
+ c = Collection.new :connections => [ Connection.new(:host => { :protocol => 'http', :host => 'foo', :port => 1}) ]
+ assert_equal 1, c.size
+
+ c.add(Connection.new(:host => { :protocol => 'http', :host => 'bar', :port => 1 }))
+ assert_equal 2, c.size
+ end
+
+ should "remove connections" do
+ c = Collection.new :connections => [
+ Connection.new(:host => { :protocol => 'http', :host => 'foo', :port => 1 }),
+ Connection.new(:host => { :protocol => 'http', :host => 'bar', :port => 1 })
+ ]
+ assert_equal 2, c.size
+
+ c.remove([c.first])
+ assert_equal 1, c.size
+
+ c.remove(c)
+ assert_equal 0, c.size
+ end
+
+ should "remove connection" do
+ c = Collection.new :connections => [
+ Connection.new(:host => { :protocol => 'http', :host => 'foo', :port => 1 }),
+ Connection.new(:host => { :protocol => 'http', :host => 'bar', :port => 1 })
+ ]
+ assert_equal 2, c.size
+
+ c.remove(c.first)
+ assert_equal 1, c.size
+ end
+
context "with the dead pool" do
setup do
@collection = Collection.new :connections => [ Connection.new(:host => 'foo'), Connection.new(:host => 'bar') ]
@@ -66,8 +108,8 @@ class Elasticsearch::Transport::Transport::Connections::CollectionTest < Test::U
end
should "resurrect dead connection with least failures when no alive is available" do
- c1 = Connection.new(:host => 'foo').dead!.dead!
- c2 = Connection.new(:host => 'bar').dead!
+ c1 = Connection.new(:host => { :protocol => 'http', :host => 'foo', :port => 123 }).dead!.dead!
+ c2 = Connection.new(:host => { :protocol => 'http', :host => 'bar', :port => 123 }).dead!
@collection = Collection.new :connections => [ c1, c2 ]
diff --git a/elasticsearch-transport/test/unit/connection_test.rb b/elasticsearch-transport/test/unit/connection_test.rb
index 911acb2..e44ca3f 100644
--- a/elasticsearch-transport/test/unit/connection_test.rb
+++ b/elasticsearch-transport/test/unit/connection_test.rb
@@ -95,6 +95,15 @@ class Elasticsearch::Transport::Transport::Connections::ConnectionTest < Test::U
assert ! c.dead?, c.inspect
end
+ should "implement the equality operator" do
+ c1 = Connection.new(:host => { :protocol => 'http', :host => 'foo', :port => 123 })
+ c2 = Connection.new(:host => { :protocol => 'http', :host => 'foo', :port => 123 })
+ c3 = Connection.new(:host => { :protocol => 'http', :host => 'foo', :port => 456 })
+
+ assert c1 == c2, "Connection #{c1} should be equal to #{c2}"
+ assert c2 != c3, "Connection #{c2} should NOT be equal to #{c3}"
+ end
+
end
end
diff --git a/elasticsearch-transport/test/unit/sniffer_test.rb b/elasticsearch-transport/test/unit/sniffer_test.rb
index b60e633..f1ab346 100644
--- a/elasticsearch-transport/test/unit/sniffer_test.rb
+++ b/elasticsearch-transport/test/unit/sniffer_test.rb
@@ -21,7 +21,7 @@ class Elasticsearch::Transport::Transport::SnifferTest < Test::Unit::TestCase
assert_equal @transport, @sniffer.transport
end
- should "return an array of hosts as hashes" do
+ should "return an array of hosts as hashes with Elasticsearch 1.x syntax" do
@transport.expects(:perform_request).returns __nodes_info <<-JSON
{
"ok" : true,
@@ -48,6 +48,60 @@ class Elasticsearch::Transport::Transport::SnifferTest < Test::Unit::TestCase
assert_equal 'Node 1', hosts.first['name']
end
+ should "return an array of hosts as hashes with Elasticsearch 2.0 syntax" do
+ @transport.expects(:perform_request).returns __nodes_info <<-JSON
+ {
+ "ok" : true,
+ "cluster_name" : "elasticsearch_test",
+ "nodes" : {
+ "N1" : {
+ "name" : "Node 1",
+ "transport_address" : "192.168.1.23:9300",
+ "hostname" : "testhost1",
+ "version" : "0.20.6",
+ "http_address" : "192.168.1.23:9200",
+ "thrift_address" : "192.168.1.23:9500",
+ "memcached_address" : "192.168.1.23:11211"
+ }
+ }
+ }
+ JSON
+
+ hosts = @sniffer.hosts
+
+ assert_equal 1, hosts.size
+ assert_equal '192.168.1.23', hosts.first[:host]
+ assert_equal '9200', hosts.first[:port]
+ assert_equal 'Node 1', hosts.first['name']
+ end
+
+ should "return an array of hosts as hostnames when a hostname is returned" do
+ @transport.expects(:perform_request).returns __nodes_info <<-JSON
+ {
+ "ok" : true,
+ "cluster_name" : "elasticsearch_test",
+ "nodes" : {
+ "N1" : {
+ "name" : "Node 1",
+ "transport_address" : "inet[/192.168.1.23:9300]",
+ "hostname" : "testhost1",
+ "version" : "0.20.6",
+ "http_address" : "inet[testhost1.com/192.168.1.23:9200]",
+ "thrift_address" : "/192.168.1.23:9500",
+ "memcached_address" : "inet[/192.168.1.23:11211]"
+ }
+ }
+ }
+ JSON
+
+ hosts = @sniffer.hosts
+
+ assert_equal 1, hosts.size
+ assert_equal 'testhost1.com', hosts.first[:host]
+ assert_equal '9200', hosts.first[:port]
+ assert_equal 'Node 1', hosts.first['name']
+ end
+
should "skip hosts without a matching transport protocol" do
@transport = DummyTransport.new :options => { :protocol => 'memcached' }
@sniffer = Elasticsearch::Transport::Transport::Sniffer.new @transport
diff --git a/elasticsearch-transport/test/unit/transport_base_test.rb b/elasticsearch-transport/test/unit/transport_base_test.rb
index aa1df6d..7ff970a 100644
--- a/elasticsearch-transport/test/unit/transport_base_test.rb
+++ b/elasticsearch-transport/test/unit/transport_base_test.rb
@@ -8,7 +8,9 @@ class Elasticsearch::Transport::Transport::BaseTest < Test::Unit::TestCase
class DummyTransport
include Elasticsearch::Transport::Transport::Base
- def __build_connections; hosts; end
+ def __build_connection(host, options={}, block=nil)
+ Elasticsearch::Transport::Transport::Connections::Connection.new :host => host, :connection => Object.new
+ end
end
class DummyTransportPerformer < DummyTransport
@@ -24,9 +26,9 @@ class Elasticsearch::Transport::Transport::BaseTest < Test::Unit::TestCase
end
context "Transport::Base" do
- should "raise exception when it doesn't implement __build_connections" do
+ should "raise exception when it doesn't implement __build_connection" do
assert_raise NoMethodError do
- EmptyTransport.new.__build_connections
+ EmptyTransport.new.__build_connection({ :host => 'foo'}, {})
end
end
@@ -75,6 +77,11 @@ class Elasticsearch::Transport::Transport::BaseTest < Test::Unit::TestCase
should "combine authentication credentials" do
assert_equal 'http://U:P@myhost:8080', @transport.__full_url(@basic_parts.merge :user => 'U', :password => 'P')
end
+
+ should "escape the username and password" do
+ assert_equal 'http://user%40domain:foo%2Fbar@myhost:8080',
+ @transport.__full_url(@basic_parts.merge :user => 'user@domain', :password => 'foo/bar')
+ end
end
end
@@ -178,6 +185,17 @@ class Elasticsearch::Transport::Transport::BaseTest < Test::Unit::TestCase
assert_equal 'FOOBAR', response.body
end
+ should "not deserialize an empty response body" do
+ @transport.expects(:get_connection).returns(stub_everything :failures => 1)
+ @transport.serializer.expects(:load).never
+ response = @transport.perform_request 'GET', '/' do
+ Elasticsearch::Transport::Transport::Response.new 200, '', {"content-type" => 'application/json'}
+ end
+
+ assert_instance_of Elasticsearch::Transport::Transport::Response, response
+ assert_equal '', response.body
+ end
+
should "serialize non-String objects" do
@transport.serializer.expects(:dump).times(3)
@transport.__convert_to_json({:foo => 'bar'})
@@ -296,6 +314,53 @@ class Elasticsearch::Transport::Transport::BaseTest < Test::Unit::TestCase
end
end unless RUBY_1_8
+ context "performing a request with retry on status" do
+ setup do
+ DummyTransportPerformer.any_instance.stubs(:connections).returns(stub :get_connection => stub_everything(:failures => 1))
+
+ logger = Logger.new(STDERR)
+ logger.level = Logger::DEBUG
+ DummyTransportPerformer.any_instance.stubs(:logger).returns(logger)
+ @block = Proc.new { |c, u| puts "ERROR" }
+ end
+
+ should "not retry when the status code does not match" do
+ @transport = DummyTransportPerformer.new :options => { :retry_on_status => 500 }
+ assert_equal [500], @transport.instance_variable_get(:@retry_on_status)
+
+ @block.expects(:call).
+ returns(Elasticsearch::Transport::Transport::Response.new 400, 'Bad Request').
+ times(1)
+
+ @transport.logger.
+ expects(:warn).
+ with( regexp_matches(/Attempt \d to get response/) ).
+ never
+
+ assert_raise Elasticsearch::Transport::Transport::Errors::BadRequest do
+ @transport.perform_request('GET', '/', &@block)
+ end
+ end
+
+ should "retry when the status code does match" do
+ @transport = DummyTransportPerformer.new :options => { :retry_on_status => 500 }
+ assert_equal [500], @transport.instance_variable_get(:@retry_on_status)
+
+ @block.expects(:call).
+ returns(Elasticsearch::Transport::Transport::Response.new 500, 'Internal Error').
+ times(4)
+
+ @transport.logger.
+ expects(:warn).
+ with( regexp_matches(/Attempt \d to get response/) ).
+ times(4)
+
+ assert_raise Elasticsearch::Transport::Transport::Errors::InternalServerError do
+ @transport.perform_request('GET', '/', &@block)
+ end
+ end
+ end unless RUBY_1_8
+
context "logging" do
setup do
@transport = DummyTransportPerformer.new :options => { :logger => Logger.new('/dev/null') }
@@ -323,6 +388,23 @@ class Elasticsearch::Transport::Transport::BaseTest < Test::Unit::TestCase
end
end
+ should "sanitize password in the URL" do
+ fake_connection = stub :full_url => 'http://user:password@localhost:9200/_search?size=1',
+ :host => 'localhost',
+ :connection => stub_everything,
+ :failures => 0,
+ :healthy! => true
+ @transport.stubs(:get_connection).returns(fake_connection)
+
+ @transport.logger.expects(:info).with do |message|
+ assert_match /http:\/\/user:\*{1,15}@localhost\:9200/, message
+ true
+ end
+
+
+ @transport.perform_request('GET', '/') {Elasticsearch::Transport::Transport::Response.new 200, '{"foo":"bar"}' }
+ end
+
should "log a failed Elasticsearch request" do
@block = Proc.new { |c, u| puts "ERROR" }
@block.expects(:call).returns(Elasticsearch::Transport::Transport::Response.new 500, 'ERROR')
@@ -419,6 +501,34 @@ class Elasticsearch::Transport::Transport::BaseTest < Test::Unit::TestCase
@transport.reload_connections!
end
end
+
+ should "keep existing connections" do
+ @transport.__rebuild_connections :hosts => [ { :host => 'node1', :port => 1 } ], :options => { :http => {} }
+ assert_equal 1, @transport.connections.size
+
+ old_connection_id = @transport.connections.first.object_id
+
+ @transport.__rebuild_connections :hosts => [ { :host => 'node1', :port => 1 },
+ { :host => 'node2', :port => 2 } ],
+ :options => { :http => {} }
+
+ assert_equal 2, @transport.connections.size
+ assert_equal old_connection_id, @transport.connections.first.object_id
+ end
+
+ should "remove dead connections" do
+ @transport.__rebuild_connections :hosts => [ { :host => 'node1', :port => 1 },
+ { :host => 'node2', :port => 2 } ],
+ :options => { :http => {} }
+ assert_equal 2, @transport.connections.size
+
+ @transport.connections[1].dead!
+
+ @transport.__rebuild_connections :hosts => [ { :host => 'node1', :port => 1 } ], :options => { :http => {} }
+
+ assert_equal 1, @transport.connections.size
+ assert_equal 1, @transport.connections.all.size
+ end
end
context "rebuilding connections" do
@@ -426,10 +536,18 @@ class Elasticsearch::Transport::Transport::BaseTest < Test::Unit::TestCase
@transport = DummyTransport.new
end
+ should "close connections" do
+ @transport.expects(:__close_connections)
+ @transport.__rebuild_connections :hosts => [ { :scheme => 'http', :host => 'foo', :port => 1 } ], :options => { :http => {} }
+ end
+
should "should replace the connections" do
- assert_equal [], @transport.connections
- @transport.__rebuild_connections :hosts => ['foo', 'bar']
- assert_equal ['foo', 'bar'], @transport.connections
+ assert_equal 0, @transport.connections.size
+
+ @transport.__rebuild_connections :hosts => [{ :scheme => 'http', :host => 'foo', :port => 1 }],
+ :options => { :http => {} }
+
+ assert_equal 1, @transport.connections.size
end
end
diff --git a/elasticsearch-transport/test/unit/transport_curb_test.rb b/elasticsearch-transport/test/unit/transport_curb_test.rb
index a82854c..85a3aea 100644
--- a/elasticsearch-transport/test/unit/transport_curb_test.rb
+++ b/elasticsearch-transport/test/unit/transport_curb_test.rb
@@ -90,6 +90,13 @@ else
assert_equal 'foo', transport.connections.first.connection.username
assert_equal 'bar', transport.connections.first.connection.password
end
+
+ should "use global http configuration" do
+ transport = Faraday.new :hosts => [ { :host => 'foobar', :port => 1234 } ],
+ :options => { :http => { :scheme => 'https', :user => 'U', :password => 'P' } }
+
+ assert_equal 'https://U:P@foobar:1234/', transport.connections.first.full_url('')
+ end
end
end
diff --git a/elasticsearch-transport/test/unit/transport_faraday_test.rb b/elasticsearch-transport/test/unit/transport_faraday_test.rb
index 70bbab8..b6567b9 100644
--- a/elasticsearch-transport/test/unit/transport_faraday_test.rb
+++ b/elasticsearch-transport/test/unit/transport_faraday_test.rb
@@ -135,6 +135,50 @@ class Elasticsearch::Transport::Transport::HTTP::FaradayTest < Test::Unit::TestC
transport = Faraday.new :hosts => [ { :host => 'foobar', :port => 1234, :user => 'foo', :password => 'bar' } ]
assert_equal 'Basic Zm9vOmJhcg==', transport.connections.first.connection.headers['Authorization']
end
+
+ should "set the credentials if they exist in options" do
+ transport = Faraday.new :hosts => [ { :host => 'foobar', :port => 1234 } ],
+ :options => { :user => 'foo', :password => 'bar' }
+ assert_equal 'Basic Zm9vOmJhcg==', transport.connections.first.connection.headers['Authorization']
+ end
+
+ should "override options credentials if passed explicitly" do
+ transport = Faraday.new :hosts => [ { :host => 'foobar', :port => 1234, :user => 'foo', :password => 'bar' },
+ { :host => 'foobar2', :port => 1234 } ],
+ :options => { :user => 'foo2', :password => 'bar2' }
+ assert_equal 'Basic Zm9vOmJhcg==', transport.connections.first.connection.headers['Authorization']
+ assert_equal 'Basic Zm9vMjpiYXIy', transport.connections[1].connection.headers['Authorization']
+ end
+
+ should "set connection scheme to https if passed" do
+ transport = Faraday.new :hosts => [ { :host => 'foobar', :port => 1234, :scheme => 'https' } ]
+
+ assert_instance_of ::Faraday::Connection, transport.connections.first.connection
+ assert_equal 'https://foobar:1234/', transport.connections.first.connection.url_prefix.to_s
+ end
+
+ should "set connection scheme to https if it exist in options" do
+ transport = Faraday.new :hosts => [ { :host => 'foobar', :port => 1234} ],
+ :options => { :scheme => 'https' }
+
+ assert_instance_of ::Faraday::Connection, transport.connections.first.connection
+ assert_equal 'https://foobar:1234/', transport.connections.first.connection.url_prefix.to_s
+ end
+
+ should "override options scheme if passed explicitly" do
+ transport = Faraday.new :hosts => [ { :host => 'foobar', :port => 1234, :scheme => 'http'} ],
+ :options => { :scheme => 'https' }
+
+ assert_instance_of ::Faraday::Connection, transport.connections.first.connection
+ assert_equal 'http://foobar:1234/', transport.connections.first.connection.url_prefix.to_s
+ end
+
+ should "use global http configuration" do
+ transport = Faraday.new :hosts => [ { :host => 'foobar', :port => 1234 } ],
+ :options => { :http => { :scheme => 'https', :user => 'U', :password => 'P' } }
+
+ assert_equal 'https://U:P@foobar:1234/', transport.connections.first.full_url('')
+ end
end
end
diff --git a/elasticsearch-transport/test/unit/transport_manticore_test.rb b/elasticsearch-transport/test/unit/transport_manticore_test.rb
index b551a23..802f93a 100644
--- a/elasticsearch-transport/test/unit/transport_manticore_test.rb
+++ b/elasticsearch-transport/test/unit/transport_manticore_test.rb
@@ -26,6 +26,12 @@ else
assert_instance_of ::Manticore::Client, @transport.connections.first.connection
end
+ should "not close connections in __close_connections" do
+ assert_equal 1, @transport.connections.size
+ @transport.__close_connections
+ assert_equal 1, @transport.connections.size
+ end
+
should "perform the request" do
@transport.connections.first.connection.expects(:get).returns(stub_everything)
@transport.perform_request 'GET', '/'
diff --git a/elasticsearch-watcher/elasticsearch-watcher.gemspec b/elasticsearch-watcher/elasticsearch-watcher.gemspec
index 132c1f7..6833094 100644
--- a/elasticsearch-watcher/elasticsearch-watcher.gemspec
+++ b/elasticsearch-watcher/elasticsearch-watcher.gemspec
@@ -21,7 +21,7 @@ Gem::Specification.new do |s|
s.add_dependency "elasticsearch-api"
s.add_development_dependency "bundler", "~> 1.3"
- s.add_development_dependency "rake"
+ s.add_development_dependency "rake", "~> 11.1"
s.add_development_dependency "elasticsearch"
s.add_development_dependency "elasticsearch-extensions"
diff --git a/elasticsearch/elasticsearch.gemspec b/elasticsearch/elasticsearch.gemspec
index dd65ed9..8796896 100644
--- a/elasticsearch/elasticsearch.gemspec
+++ b/elasticsearch/elasticsearch.gemspec
@@ -20,11 +20,16 @@ Gem::Specification.new do |s|
s.extra_rdoc_files = [ "README.md", "LICENSE.txt" ]
s.rdoc_options = [ "--charset=UTF-8" ]
- s.add_dependency "elasticsearch-transport", '1.0.12'
- s.add_dependency "elasticsearch-api", '1.0.12'
+ s.add_dependency "elasticsearch-transport", '2.0.0'
+ s.add_dependency "elasticsearch-api", '2.0.0'
s.add_development_dependency "bundler", "> 1"
- s.add_development_dependency "rake"
+
+ if defined?(RUBY_VERSION) && RUBY_VERSION > '1.9'
+ s.add_development_dependency "rake", "~> 11.1"
+ else
+ s.add_development_dependency "rake", "< 11.0"
+ end
if defined?(RUBY_VERSION) && RUBY_VERSION > '1.9'
s.add_development_dependency "elasticsearch-extensions"
diff --git a/elasticsearch/lib/elasticsearch/version.rb b/elasticsearch/lib/elasticsearch/version.rb
index 4c6a546..9f7944e 100644
--- a/elasticsearch/lib/elasticsearch/version.rb
+++ b/elasticsearch/lib/elasticsearch/version.rb
@@ -1,3 +1,3 @@
module Elasticsearch
- VERSION = "1.0.12"
+ VERSION = "2.0.0"
end
diff --git a/elasticsearch/test/integration/client_integration_test.rb b/elasticsearch/test/integration/client_integration_test.rb
index 6f7934a..68a418a 100644
--- a/elasticsearch/test/integration/client_integration_test.rb
+++ b/elasticsearch/test/integration/client_integration_test.rb
@@ -8,6 +8,10 @@ module Elasticsearch
Elasticsearch::Extensions::Test::Cluster.start(nodes: 2) if ENV['SERVER'] and not Elasticsearch::Extensions::Test::Cluster.running?
end
+ shutdown do
+ Elasticsearch::Extensions::Test::Cluster.stop if ENV['SERVER'] and Elasticsearch::Extensions::Test::Cluster.running?
+ end
+
context "Elasticsearch client" do
setup do
@port = (ENV['TEST_CLUSTER_PORT'] || 9250).to_i
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-ruby-extras/ruby-elasticsearch.git
More information about the Pkg-ruby-extras-commits
mailing list