@@ -221,8 +221,8 @@ def info(self, params=None):
221221 """
222222 return self .transport .perform_request ('GET' , '/' , params = params )
223223
224- @query_params ('parent' , 'pipeline' , 'refresh' , 'routing' ,
225- 'timeout' , ' timestamp' , 'ttl' , 'version' , 'version_type' )
224+ @query_params ('parent' , 'pipeline' , 'refresh' , 'routing' , 'timeout' ,
225+ 'timestamp' , 'ttl' , 'version' , 'version_type' , 'wait_for_active_shards ' )
226226 def create (self , index , doc_type , id , body , params = None ):
227227 """
228228 Adds a typed JSON document in a specific index, making it searchable.
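For context, here is a minimal sketch of how the newly listed `wait_for_active_shards` parameter would be passed to `create` (the client address, index, type, and document are illustrative, not taken from the diff):

from elasticsearch import Elasticsearch

es = Elasticsearch()  # assumes a cluster reachable on localhost:9200

# Require at least one active shard copy before the call returns.
es.create(index='blog', doc_type='post', id='1',
          body={'title': 'Hello'}, wait_for_active_shards=1)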
@@ -299,7 +299,9 @@ def index(self, index, doc_type, body, id=None, params=None):
         return self.transport.perform_request('POST' if id in SKIP_IN_PATH else 'PUT',
             _make_path(index, doc_type, id), params=params, body=body)
 
-    @query_params('parent', 'preference', 'realtime', 'refresh', 'routing')
+    @query_params('_source', '_source_exclude', '_source_include', 'parent',
+        'preference', 'realtime', 'refresh', 'routing', 'stored_fields',
+        'version', 'version_type')
     def exists(self, index, doc_type, id, params=None):
         """
         Returns a boolean indicating whether or not given document exists in Elasticsearch.
@@ -309,6 +311,12 @@ def exists(self, index, doc_type, id, params=None):
         :arg doc_type: The type of the document (use `_all` to fetch the first
             document matching the ID across all types)
         :arg id: The document ID
+        :arg _source: True or false to return the _source field or not, or a
+            list of fields to return
+        :arg _source_exclude: A list of fields to exclude from the returned
+            _source field
+        :arg _source_include: A list of fields to extract and return from the
+            _source field
         :arg parent: The ID of the parent document
         :arg preference: Specify the node or shard the operation should be
             performed on (default: random)
@@ -317,13 +325,53 @@ def exists(self, index, doc_type, id, params=None):
         :arg refresh: Refresh the shard containing the document before
             performing the operation
         :arg routing: Specific routing value
+        :arg stored_fields: A comma-separated list of stored fields to return in
+            the response
+        :arg version: Explicit version number for concurrency control
+        :arg version_type: Specific version type, valid choices are: 'internal',
+            'external', 'external_gte', 'force'
         """
         for param in (index, doc_type, id):
             if param in SKIP_IN_PATH:
                 raise ValueError("Empty value passed for a required argument.")
         return self.transport.perform_request('HEAD', _make_path(index,
             doc_type, id), params=params)
 
+    @query_params('_source', '_source_exclude', '_source_include', 'parent',
+        'preference', 'realtime', 'refresh', 'routing', 'version',
+        'version_type')
+    def exists_source(self, index, doc_type, id, params=None):
+        """
+        `<http://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html>`_
+
+        :arg index: The name of the index
+        :arg doc_type: The type of the document; use `_all` to fetch the first
+            document matching the ID across all types
+        :arg id: The document ID
+        :arg _source: True or false to return the _source field or not, or a
+            list of fields to return
+        :arg _source_exclude: A list of fields to exclude from the returned
+            _source field
+        :arg _source_include: A list of fields to extract and return from the
+            _source field
+        :arg parent: The ID of the parent document
+        :arg preference: Specify the node or shard the operation should be
+            performed on (default: random)
+        :arg realtime: Specify whether to perform the operation in realtime or
+            search mode
+        :arg refresh: Refresh the shard containing the document before
+            performing the operation
+        :arg routing: Specific routing value
+        :arg version: Explicit version number for concurrency control
+        :arg version_type: Specific version type, valid choices are: 'internal',
+            'external', 'external_gte', 'force'
+        """
+        for param in (index, doc_type, id):
+            if param in SKIP_IN_PATH:
+                raise ValueError("Empty value passed for a required argument.")
+        return self.transport.perform_request('HEAD', _make_path(index,
+            doc_type, id, '_source'), params=params)
+
     @query_params('_source', '_source_exclude', '_source_include', 'parent',
         'preference', 'realtime', 'refresh', 'routing', 'stored_fields',
         'version', 'version_type')
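A quick sketch of the expanded `exists` signature and the new `exists_source` helper (index, type, and field names are illustrative):

from elasticsearch import Elasticsearch

es = Elasticsearch()

# HEAD blog/post/1 with the new source-filtering and versioning query params.
has_doc = es.exists(index='blog', doc_type='post', id='1',
                    _source_include='title', version=2, version_type='external')

# HEAD blog/post/1/_source -- should be True only when the document exists
# and its _source can be returned.
has_source = es.exists_source(index='blog', doc_type='post', id='1')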
@@ -449,7 +497,7 @@ def update(self, index, doc_type, id, body=None, params=None):
         :arg _source_include: A list of fields to extract and return from the
             _source field
         :arg fields: A comma-separated list of fields to return in the response
-        :arg lang: The script language (default: groovy)
+        :arg lang: The script language (default: painless)
         :arg parent: ID of the parent document. It is only used for routing and
             for the upsert request
         :arg refresh: If `true` then refresh the affected shards to make this
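Because the default script language documented above is now painless rather than groovy, a scripted update would look roughly like this (index and script contents are illustrative; `lang` is spelled out only for clarity):

from elasticsearch import Elasticsearch

es = Elasticsearch()

es.update(index='blog', doc_type='post', id='1', body={
    'script': {
        'inline': 'ctx._source.views += params.count',
        'lang': 'painless',          # the 5.x default, shown explicitly
        'params': {'count': 1},
    }
})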
@@ -479,13 +527,14 @@ def update(self, index, doc_type, id, body=None, params=None):
             doc_type, id, '_update'), params=params, body=body)
 
     @query_params('_source', '_source_exclude', '_source_include',
-        'allow_no_indices', 'analyze_wildcard', 'analyzer', 'default_operator',
-        'df', 'docvalue_fields', 'expand_wildcards', 'explain',
-        'fielddata_fields', 'from_', 'ignore_unavailable', 'lenient',
-        'lowercase_expanded_terms', 'preference', 'q', 'request_cache',
-        'routing', 'scroll', 'search_type', 'size', 'sort', 'stats',
-        'stored_fields', 'suggest_field', 'suggest_mode', 'suggest_size',
-        'suggest_text', 'terminate_after', 'timeout', 'track_scores', 'version')
+        'allow_no_indices', 'analyze_wildcard', 'analyzer',
+        'batched_reduce_size', 'default_operator', 'df', 'docvalue_fields',
+        'expand_wildcards', 'explain', 'fielddata_fields', 'from_',
+        'ignore_unavailable', 'lenient', 'lowercase_expanded_terms',
+        'preference', 'q', 'request_cache', 'routing', 'scroll',
+        'search_type', 'size', 'sort', 'stats', 'stored_fields',
+        'suggest_field', 'suggest_mode', 'suggest_size', 'suggest_text',
+        'terminate_after', 'timeout', 'track_scores', 'typed_keys', 'version')
     def search(self, index=None, doc_type=None, body=None, params=None):
         """
         Execute a search query and get back search hits that match the query.
@@ -508,6 +557,11 @@ def search(self, index=None, doc_type=None, body=None, params=None):
         :arg analyze_wildcard: Specify whether wildcard and prefix queries
             should be analyzed (default: false)
         :arg analyzer: The analyzer to use for the query string
+        :arg batched_reduce_size: The number of shard results that should be
+            reduced at once on the coordinating node. This value should be used
+            as a protection mechanism to reduce the memory overhead per search
+            request if the potential number of shards in the request can be
+            large., default 512
         :arg default_operator: The default operator for query string query (AND
             or OR), default 'OR', valid choices are: 'AND', 'OR'
         :arg df: The field to use as default where no field prefix is given in
@@ -556,6 +610,8 @@ def search(self, index=None, doc_type=None, body=None, params=None):
         :arg timeout: Explicit operation timeout
         :arg track_scores: Whether to calculate and return scores even if they
             are not used for sorting
+        :arg typed_keys: Specify whether aggregation and suggester names should
+            be prefixed by their respective types in the response
         :arg version: Specify whether to return document version as part of a
             hit
         """
@@ -643,6 +699,9 @@ def update_by_query(self, index, doc_type=None, body=None, params=None):
         :arg search_type: Search operation type, valid choices are:
             'query_then_fetch', 'dfs_query_then_fetch'
         :arg size: Number of hits to return (default: 10)
+        :arg slices: The number of slices this task should be divided into.
+            Defaults to 1 meaning the task isn't sliced into subtasks., default
+            1
         :arg sort: A comma-separated list of <field>:<direction> pairs
         :arg stats: Specific 'tag' of the request for logging and statistical
             purposes
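A sliced `update_by_query` sketch using the new `slices` parameter (index, query, and script are illustrative):

from elasticsearch import Elasticsearch

es = Elasticsearch()

# Split the work into 4 parallel sub-tasks instead of the default single task.
es.update_by_query(
    index='blog',
    body={'script': {'inline': 'ctx._source.archived = true', 'lang': 'painless'},
          'query': {'range': {'published': {'lt': '2016-01-01'}}}},
    slices=4,
)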
@@ -679,7 +738,7 @@ def update_by_query(self, index, doc_type=None, body=None, params=None):
         return self.transport.perform_request('POST', _make_path(index,
             doc_type, '_update_by_query'), params=params, body=body)
 
-    @query_params('refresh', 'requests_per_second', 'timeout',
+    @query_params('refresh', 'requests_per_second', 'slices', 'timeout',
         'wait_for_active_shards', 'wait_for_completion')
     def reindex(self, body, params=None):
         """
@@ -692,6 +751,9 @@ def reindex(self, body, params=None):
         :arg requests_per_second: The throttle to set on this request in sub-
             requests per second. -1 means set no throttle as does "unlimited"
             which is the only non-float this accepts., default 0
+        :arg slices: The number of slices this task should be divided into.
+            Defaults to 1 meaning the task isn't sliced into subtasks., default
+            1
         :arg timeout: Time each individual bulk request should wait for shards
             that are unavailable., default '1m'
         :arg wait_for_active_shards: Sets the number of shard copies that must
@@ -700,7 +762,7 @@ def reindex(self, body, params=None):
             copies, otherwise set to any non-negative value less than or equal
             to the total number of copies for the shard (number of replicas + 1)
         :arg wait_for_completion: Should the request block until the
-            reindex is complete., default True
+            reindex is complete., default False
         """
         if body in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'body'.")
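A `reindex` sketch exercising `slices`; passing `wait_for_completion=False` should return a task to poll instead of blocking (index names are illustrative):

from elasticsearch import Elasticsearch

es = Elasticsearch()

resp = es.reindex(
    body={'source': {'index': 'blog-v1'}, 'dest': {'index': 'blog-v2'}},
    slices=4,                   # run the copy as 4 parallel sub-tasks
    wait_for_completion=False,  # return a task id to poll rather than blocking
)
# resp is then expected to look like {'task': '<node_id>:<task_id>'}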
@@ -725,7 +787,7 @@ def reindex_rethrottle(self, task_id=None, params=None):
         'default_operator', 'df', 'docvalue_fields', 'expand_wildcards',
         'explain', 'from_', 'ignore_unavailable', 'lenient',
         'lowercase_expanded_terms', 'preference', 'q', 'refresh',
-        'request_cache', 'requests_per_second', 'routing', 'scroll',
+        'request_cache', 'requests_per_second', 'routing', 'scroll', 'slices',
         'scroll_size', 'search_timeout', 'search_type', 'size', 'sort', 'stats',
         'stored_fields', 'suggest_field', 'suggest_mode', 'suggest_size',
         'suggest_text', 'terminate_after', 'timeout', 'track_scores', 'version',
@@ -779,7 +841,7 @@ def delete_by_query(self, index, body, doc_type=None, params=None):
         :arg request_cache: Specify if request cache should be used for this
             request or not, defaults to index level setting
         :arg requests_per_second: The throttle for this request in sub-requests
-            per second. -1 means set no throttle., default 0
+            per second. -1 means no throttle., default 0
         :arg routing: A comma-separated list of specific routing values
         :arg scroll: Specify how long a consistent view of the index should be
             maintained for scrolled search
@@ -790,6 +852,9 @@ def delete_by_query(self, index, body, doc_type=None, params=None):
         :arg search_type: Search operation type, valid choices are:
             'query_then_fetch', 'dfs_query_then_fetch'
         :arg size: Number of hits to return (default: 10)
+        :arg slices: The number of slices this task should be divided into.
+            Defaults to 1 meaning the task isn't sliced into subtasks., default
+            1
         :arg sort: A comma-separated list of <field>:<direction> pairs
         :arg stats: Specific 'tag' of the request for logging and statistical
             purposes
@@ -817,7 +882,7 @@ def delete_by_query(self, index, body, doc_type=None, params=None):
             equal to the total number of copies for the shard (number of
             replicas + 1)
         :arg wait_for_completion: Should the request block until the
-            delete-by-query is complete., default False
+            delete-by-query is complete., default True
         """
         for param in (index, body):
             if param in SKIP_IN_PATH:
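The corresponding sliced `delete_by_query` sketch (index and query are illustrative):

from elasticsearch import Elasticsearch

es = Elasticsearch()

# Delete matching documents in 4 parallel slices.
es.delete_by_query(index='blog',
                   body={'query': {'term': {'status': 'spam'}}},
                   slices=4)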
@@ -855,8 +920,9 @@ def search_shards(self, index=None, doc_type=None, params=None):
         return self.transport.perform_request('GET', _make_path(index,
             doc_type, '_search_shards'), params=params)
 
-    @query_params('allow_no_indices', 'expand_wildcards', 'ignore_unavailable',
-        'preference', 'routing', 'scroll', 'search_type')
+    @query_params('allow_no_indices', 'expand_wildcards', 'explain',
+        'ignore_unavailable', 'preference', 'profile', 'routing', 'scroll',
+        'search_type', 'typed_keys')
     def search_template(self, index=None, doc_type=None, body=None, params=None):
         """
         A query that accepts a query template and a map of key/value pairs to
@@ -874,16 +940,21 @@ def search_template(self, index=None, doc_type=None, body=None, params=None):
         :arg expand_wildcards: Whether to expand wildcard expression to concrete
             indices that are open, closed or both., default 'open', valid
             choices are: 'open', 'closed', 'none', 'all'
+        :arg explain: Specify whether to return detailed information about score
+            computation as part of a hit
         :arg ignore_unavailable: Whether specified concrete indices should be
             ignored when unavailable (missing or closed)
         :arg preference: Specify the node or shard the operation should be
             performed on (default: random)
+        :arg profile: Specify whether to profile the query execution
         :arg routing: A comma-separated list of specific routing values
         :arg scroll: Specify how long a consistent view of the index should be
             maintained for scrolled search
         :arg search_type: Search operation type, valid choices are:
             'query_then_fetch', 'query_and_fetch', 'dfs_query_then_fetch',
             'dfs_query_and_fetch'
+        :arg typed_keys: Specify whether aggregation and suggester names should
+            be prefixed by their respective types in the response
         """
         return self.transport.perform_request('GET', _make_path(index,
             doc_type, '_search', 'template'), params=params, body=body)
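A `search_template` sketch with the newly exposed `profile` and `typed_keys` flags (the inline template and its params are illustrative and assume the 5.x `inline`/`params` body layout):

from elasticsearch import Elasticsearch

es = Elasticsearch()

resp = es.search_template(
    index='blog',
    body={'inline': {'query': {'match': {'title': '{{text}}'}}},
          'params': {'text': 'python'}},
    profile=True,     # include query profiling output in the response
    typed_keys=True,  # type-prefix aggregation and suggester names
)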
@@ -1095,7 +1166,7 @@ def bulk(self, body, index=None, doc_type=None, params=None):
         return self.transport.perform_request('POST', _make_path(index,
             doc_type, '_bulk'), params=params, body=self._bulk_body(body))
 
-    @query_params('max_concurrent_searches', 'search_type')
+    @query_params('max_concurrent_searches', 'search_type', 'typed_keys')
     def msearch(self, body, index=None, doc_type=None, params=None):
         """
         Execute several search requests within the same API.
@@ -1111,6 +1182,8 @@ def msearch(self, body, index=None, doc_type=None, params=None):
         :arg search_type: Search operation type, valid choices are:
             'query_then_fetch', 'query_and_fetch', 'dfs_query_then_fetch',
             'dfs_query_and_fetch'
+        :arg typed_keys: Specify whether aggregation and suggester names should
+            be prefixed by their respective types in the response
         """
         if body in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'body'.")
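Finally, a `msearch` sketch with `typed_keys`; the body alternates header and search-body lines, which the client serializes through `_bulk_body` (index names are illustrative):

from elasticsearch import Elasticsearch

es = Elasticsearch()

resp = es.msearch(
    body=[
        {'index': 'blog'},                          # header for the first search
        {'query': {'match': {'title': 'python'}}},
        {'index': 'news'},                          # header for the second search
        {'query': {'match_all': {}}, 'size': 5},
    ],
    typed_keys=True,
)
for result in resp['responses']:
    print(result['hits']['total'])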