[Python-modules-commits] [elasticsearch-curator] 01/06: Import elasticsearch-curator_4.0.6.orig.tar.gz

Apollon Oikonomopoulos apoikos at moszumanska.debian.org
Wed Sep 7 12:44:36 UTC 2016


This is an automated email from the git hooks/post-receive script.

apoikos pushed a commit to branch master
in repository elasticsearch-curator.

commit 7563cf75508aaec603f15adf396ceefe31c4c201
Author: Apollon Oikonomopoulos <apoikos at debian.org>
Date:   Wed Sep 7 15:28:37 2016 +0300

    Import elasticsearch-curator_4.0.6.orig.tar.gz
---
 .travis.yml                             |   4 +-
 CONTRIBUTORS                            |   5 +
 MANIFEST.in                             |   3 +
 README.rst                              |  26 ++++++
 curator/__init__.py                     |   2 +-
 curator/_version.py                     |   2 +-
 curator/actions.py                      |  43 +++++----
 curator/cli.py                          |  26 +++---
 curator/defaults/__init__.py            |   0
 curator/defaults/settings.py            | 159 ++++++++++++++++++++++++++++++++
 curator/indexlist.py                    |  49 ++++------
 curator/logtools.py                     |  17 +---
 curator/repomgrcli.py                   |  31 +++----
 curator/settings.py                     | 148 -----------------------------
 curator/snapshotlist.py                 |  21 +++--
 curator/utils.py                        |  62 +++++++------
 docs/Changelog.rst                      |  95 +++++++++++++++++++
 docs/asciidoc/about.asciidoc            |  16 +++-
 docs/asciidoc/actions.asciidoc          |   2 +
 docs/asciidoc/command-line.asciidoc     |   6 ++
 docs/asciidoc/configuration.asciidoc    |  30 ++++++
 docs/asciidoc/examples.asciidoc         |   4 +-
 docs/asciidoc/faq.asciidoc              |  14 +++
 docs/asciidoc/filter_elements.asciidoc  |  35 +++++--
 docs/asciidoc/filters.asciidoc          |   5 +-
 docs/asciidoc/getting_started.asciidoc  |   4 +
 docs/asciidoc/index.asciidoc            |   2 +-
 docs/asciidoc/installation.asciidoc     |   7 ++
 docs/asciidoc/options.asciidoc          |  29 ++++++
 examples/curator.yml                    |   1 +
 setup.py                                |  15 ++-
 test/integration/test_delete_indices.py |  89 +++++++++++++++++-
 test/integration/test_snapshot.py       |  42 +++++++++
 test/integration/testvars.py            |  87 ++++++++++++++++-
 test/unit/test_action_allocation.py     |   2 +-
 test/unit/test_class_index_list.py      |  22 +++--
 test/unit/test_utils.py                 |  10 ++
 travis-run.sh                           |  15 ++-
 38 files changed, 822 insertions(+), 308 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index fbff486..b352c37 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -9,8 +9,8 @@ env:
   - ES_VERSION=2.0.0
   - ES_VERSION=2.1.1
   - ES_VERSION=2.2.2
-  - ES_VERSION=2.3.3
-  - ES_VERSION=5.0.0-alpha4
+  - ES_VERSION=2.3.5
+  - ES_VERSION=5.0.0-alpha5
 
 os: linux
 
diff --git a/CONTRIBUTORS b/CONTRIBUTORS
index d770d28..1b9334f 100644
--- a/CONTRIBUTORS
+++ b/CONTRIBUTORS
@@ -74,3 +74,8 @@ Contributors:
 * Thibaut Ackermann (thib-ack)
 * (zzugg)
 * Julien Mancuso (petitout)
+* Spencer Herzberg (sherzberg)
+* Luke Waite (lukewaite)
+* (dtrv)
+* Christopher "Chief" Najewicz (chiefy)
+* Filipe Gonçalves (basex)
diff --git a/MANIFEST.in b/MANIFEST.in
index 36a1543..c90e98c 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -5,8 +5,11 @@ include README.rst
 include Dockerfile
 recursive-exclude * __pycache__
 recursive-exclude * *.py[co]
+recursive-include curator *
 recursive-include test *
 recursive-include docs *
+recursive-exclude curator *.pyc
+recursive-exclude curator *.pyo
 recursive-exclude docs *.pyc
 recursive-exclude docs *.pyo
 recursive-exclude test *.pyc
diff --git a/README.rst b/README.rst
index 82cd953..b23f326 100644
--- a/README.rst
+++ b/README.rst
@@ -9,6 +9,32 @@ Have indices in Elasticsearch? This is the tool for you!
 Like a museum curator manages the exhibits and collections on display,
 Elasticsearch Curator helps you curate, or manage your indices.
 
+Compatibility Matrix
+=======
+
++--------+----------+----------+----------+
+|Version | ES 1.x   | ES 2.x   | ES 5.x   |
++========+==========+==========+==========+
+|    3   |    yes   |     yes  |     no   |
++--------+----------+----------+----------+
+|    4   |    no    |     yes  |     yes  |
++--------+----------+----------+----------+
+
+It is important to note that Curator 4 will not work with indices created in
+versions of Elasticsearch older than 1.4 (if they have been subsequently
+re-indexed, they will work).  This is because those older indices lack index
+metadata that Curator 4 requires.  Curator 4 will simply exclude any such
+indices from being acted on, and you will get a warning message like the
+following:
+
+::
+
+    2016-07-31 10:36:17,423 WARNING Index: YOUR_INDEX_NAME has no
+    "creation_date"! This implies that the index predates Elasticsearch v1.4.
+    For safety, this index will be removed from the actionable list.
+
+
+
 Build Status
 ------------
 
diff --git a/curator/__init__.py b/curator/__init__.py
index a9b39d3..ff44ee0 100644
--- a/curator/__init__.py
+++ b/curator/__init__.py
@@ -1,5 +1,5 @@
 from .exceptions import *
-from .settings import *
+from .defaults import settings
 from .logtools import *
 from .utils import *
 from .indexlist import IndexList
diff --git a/curator/_version.py b/curator/_version.py
index 1a3bef5..9066d7f 100644
--- a/curator/_version.py
+++ b/curator/_version.py
@@ -1 +1 @@
-__version__ = '4.0.1'
+__version__ = '4.0.6'
diff --git a/curator/actions.py b/curator/actions.py
index 95ac3d0..92cdd24 100644
--- a/curator/actions.py
+++ b/curator/actions.py
@@ -107,6 +107,7 @@ class Alias(object):
         Run the API call `update_aliases` with the results of `body()`
         """
         self.loggit.info('Updating aliases...')
+        self.loggit.info('Alias actions: {0}'.format(self.body()))
         try:
             self.client.indices.update_aliases(body=self.body())
         except Exception as e:
@@ -153,10 +154,8 @@ class Allocation(object):
         #: Instance variable.
         #: Populated at instance creation time. Value is
         #: ``index.routing.allocation.`` `allocation_type` ``.`` `key` ``.`` `value`
-        self.body       = (
-            'index.routing.allocation.'
-            '{0}.{1}={2}'.format(allocation_type, key, value)
-        )
+        bkey = 'index.routing.allocation.{0}.{1}'.format(allocation_type, key)
+        self.body       = { bkey : value }
         #: Instance variable.
         #: Internal reference to `wait_for_completion`
         self.wfc        = wait_for_completion
@@ -176,7 +175,7 @@ class Allocation(object):
         Change allocation settings for indices in `index_list.indices` with the
         settings in `body`.
         """
-        self.loggit.info(
+        self.loggit.debug(
             'Cannot get change shard routing allocation of closed indices.  '
             'Omitting any closed indices.'
         )
@@ -191,12 +190,12 @@ class Allocation(object):
                     index=to_csv(l), body=self.body
                 )
                 if self.wfc:
-                    logger.info(
+                    logger.debug(
                         'Waiting for shards to complete relocation for indices:'
                         ' {0}'.format(to_csv(l))
                     )
                     self.client.cluster.health(index=to_csv(l),
-                        level='indices', wait_for_relocation_shards=0,
+                        level='indices', wait_for_relocating_shards=0,
                         timeout=self.timeout,
                     )
         except Exception as e:
@@ -236,7 +235,8 @@ class Close(object):
         """
         self.index_list.filter_closed()
         self.index_list.empty_list_check()
-        self.loggit.info('Closing selected indices')
+        self.loggit.info(
+            'Closing selected indices: {0}'.format(self.index_list.indices))
         try:
             index_lists = chunk_index_list(self.index_list.indices)
             for l in index_lists:
@@ -381,7 +381,8 @@ class DeleteIndices(object):
         Delete indices in `index_list.indices`
         """
         self.index_list.empty_list_check()
-        self.loggit.info('Deleting selected indices')
+        self.loggit.info(
+            'Deleting selected indices: {0}'.format(self.index_list.indices))
         try:
             index_lists = chunk_index_list(self.index_list.indices)
             for l in index_lists:
@@ -477,7 +478,8 @@ class Open(object):
         Open closed indices in `index_list.indices`
         """
         self.index_list.empty_list_check()
-        self.loggit.info('Opening selected indices')
+        self.loggit.info(
+            'Opening selected indices: {0}'.format(self.index_list.indices))
         try:
             index_lists = chunk_index_list(self.index_list.indices)
             for l in index_lists:
@@ -529,22 +531,22 @@ class Replicas(object):
         Update the replica count of indices in `index_list.indices`
         """
         self.index_list.empty_list_check()
-        self.loggit.info(
+        self.loggit.debug(
             'Cannot get update replica count of closed indices.  '
             'Omitting any closed indices.'
         )
         self.index_list.filter_closed()
         self.loggit.info(
-            'Updating the replica count of selected indices to '
-            '{0}'.format(self.count)
+            'Setting the replica count to {0} for indices: '
+            '{1}'.format(self.count, self.index_list.indices)
         )
         try:
             index_lists = chunk_index_list(self.index_list.indices)
             for l in index_lists:
                 self.client.indices.put_settings(index=to_csv(l),
-                    body='number_of_replicas={0}'.format(self.count))
+                    body={'number_of_replicas' : self.count})
                 if self.wfc and self.count > 0:
-                    logger.info(
+                    logger.debug(
                         'Waiting for shards to complete replication for '
                         'indices: {0}'.format(to_csv(l))
                     )
@@ -645,6 +647,9 @@ class Snapshot(object):
         :type skip_repo_fs_check: bool
         """
         verify_index_list(ilo)
+        # Check here and don't bother with the rest of this if there are no
+        # indices in the index list.
+        ilo.empty_list_check()
         if not repository_exists(ilo.client, repository=repository):
             raise ActionError(
                 'Cannot snapshot indices to missing repository: '
@@ -732,6 +737,9 @@ class Snapshot(object):
         if snapshot_running(self.client):
             raise SnapshotInProgress('Snapshot already in progress.')
         try:
+            self.loggit.info('Creating snapshot "{0}" from indices: '
+                '{1}'.format(self.name, self.index_list.indices)
+            )
             self.client.snapshot.create(
                 repository=self.repository, snapshot=self.name, body=self.body,
                 wait_for_completion=self.wait_for_completion
@@ -852,7 +860,7 @@ class Restore(object):
                 'rename_replacement' : self.rename_replacement,
             }
         if extra_settings:
-            self.loggit.info(
+            self.loggit.debug(
                 'Adding extra_settings to restore body: '
                 '{0}'.format(extra_settings)
             )
@@ -948,6 +956,9 @@ class Restore(object):
             raise SnapshotInProgress(
                 'Cannot restore while a snapshot is in progress.')
         try:
+            self.loggit.info('Restoring indices "{0}" from snapshot: '
+                '{1}'.format(self.indices, self.name)
+            )
             self.client.snapshot.restore(
                 repository=self.repository, snapshot=self.name, body=self.body,
                 wait_for_completion=self.wfc
diff --git a/curator/cli.py b/curator/cli.py
index 0a62ff9..8da44ab 100644
--- a/curator/cli.py
+++ b/curator/cli.py
@@ -2,16 +2,14 @@ import os, sys
 import yaml
 import logging
 import click
-import copy
-from .settings import ACTION_DEFAULTS, CONFIG_FILE, CLIENT_DEFAULTS, \
-    LOGGING_DEFAULTS, OPTION_DEFAULTS
+from .defaults import settings
 from .exceptions import *
 from .utils import *
 from .indexlist import IndexList
 from .snapshotlist import SnapshotList
 from .actions import *
 from ._version import __version__
-from .logtools import LogInfo
+from .logtools import LogInfo, Whitelist, Blacklist
 
 try:
     from logging import NullHandler
@@ -53,9 +51,7 @@ def process_action(client, config, **kwargs):
     mykwargs = {}
 
     if action in CLASS_MAP:
-        # deepcopy guarantees clean copies of the defaults, and nothing getting
-        # altered in "pass by reference," which was happening in testing.
-        mykwargs = copy.deepcopy(ACTION_DEFAULTS[action])
+        mykwargs = settings.action_defaults()[action]
         action_class = CLASS_MAP[action]
     else:
         raise ConfigurationError(
@@ -117,7 +113,7 @@ def process_action(client, config, **kwargs):
 @click.command()
 @click.option('--config',
     help="Path to configuration file. Default: ~/.curator/curator.yml",
-    type=click.Path(exists=True), default=CONFIG_FILE
+    type=click.Path(exists=True), default=settings.config_file()
 )
 @click.option('--dry-run', is_flag=True, help='Do not perform any changes.')
 @click.argument('action_file', type=click.Path(exists=True), nargs=1)
@@ -133,11 +129,11 @@ def cli(config, dry_run, action_file):
     # Get default options and overwrite with any changes
     try:
         yaml_log_opts = prune_nones(yaml_config['logging'])
-        log_opts      = LOGGING_DEFAULTS
+        log_opts      = settings.logs()
         log_opts.update(yaml_log_opts)
     except KeyError:
         # Use the defaults if there is no logging section
-        log_opts = LOGGING_DEFAULTS
+        log_opts = settings.logs()
     # Set up logging
     loginfo = LogInfo(log_opts)
     logging.root.addHandler(loginfo.handler)
@@ -146,11 +142,15 @@ def cli(config, dry_run, action_file):
     # Set up NullHandler() to handle nested elasticsearch.trace Logger
     # instance in elasticsearch python client
     logging.getLogger('elasticsearch.trace').addHandler(NullHandler())
+    if log_opts['blacklist']:
+        for bl_entry in ensure_list(log_opts['blacklist']):
+            for handler in logging.root.handlers:
+                handler.addFilter(Blacklist(bl_entry))
 
     # Get default client options and overwrite with any changes
     try:
         yaml_client  = prune_nones(yaml_config['client'])
-        client_args  = CLIENT_DEFAULTS
+        client_args  = settings.client()
         client_args.update(yaml_client)
     except KeyError:
         logger.critical(
@@ -177,7 +177,7 @@ def cli(config, dry_run, action_file):
         logger.info('Action #{0}: {1}'.format(idx, action))
         if not 'options' in actions[idx] or \
                 type(actions[idx]['options']) is not type(dict()):
-            actions[idx]['options'] = OPTION_DEFAULTS
+            actions[idx]['options'] = settings.options()
         # Assign and remove these keys from the options as the action will
         # raise an exception if they are passed as kwargs
         action_disabled = actions[idx]['options'].pop('disable_action', False)
@@ -249,3 +249,5 @@ def cli(config, dry_run, action_file):
                     )
                 else:
                     sys.exit(1)
+        logger.info('Action #{0}: completed'.format(idx))
+    logger.info('Job completed.')
diff --git a/curator/defaults/__init__.py b/curator/defaults/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/curator/defaults/settings.py b/curator/defaults/settings.py
new file mode 100644
index 0000000..c767f08
--- /dev/null
+++ b/curator/defaults/settings.py
@@ -0,0 +1,159 @@
+import os
+
+# Elasticsearch versions supported
+def version_max():
+    return (5, 1, 0)
+def version_min():
+    return (2, 0, 0)
+
+def config_file():
+    return os.path.join(os.path.expanduser('~'), '.curator', 'curator.yml')
+
+def client():
+    return {
+        'hosts': '127.0.0.1',
+        'port': 9200,
+        'url_prefix': '',
+        'http_auth': None,
+        'use_ssl': False,
+        'certificate': None,
+        'client_cert': None,
+        'client_key': None,
+        'aws_key': None,
+        'aws_secret_key': None,
+        'aws_region': None,
+        'ssl_no_validate': False,
+        'timeout': 30,
+        'master_only': False,
+    }
+
+def logs():
+    return {
+        'loglevel': 'INFO',
+        'logfile': None,
+        'logformat': 'default',
+        'blacklist': ['elasticsearch', 'urllib3'],
+    }
+
+def options():
+    return {
+        'ignore_empty_list': False,
+        'timeout_override': None,
+        'continue_if_exception': False,
+        'disable_action': False,
+    }
+
+def regex_map():
+    return {
+        'timestring': r'^.*{0}.*$',
+        'regex': r'{0}',
+        'prefix': r'^{0}.*$',
+        'suffix': r'^.*{0}$',
+    }
+
+def date_regex():
+    return {
+        'Y' : '4',
+        'y' : '2',
+        'm' : '2',
+        'W' : '2',
+        'U' : '2',
+        'd' : '2',
+        'H' : '2',
+        'M' : '2',
+        'S' : '2',
+        'j' : '3',
+    }
+
+def action_defaults():
+    return {
+        'alias' : {
+            'name' : None,
+            'extra_settings' : {},
+        },
+        'allocation' : {
+            'key' : None,
+            'value' : None,
+            'allocation_type' : 'require',
+            'wait_for_completion' : False,
+            'timeout' : 30,
+        },
+        'close' : { 'delete_aliases' : False },
+        'create_index' : {
+            'name' : None,
+            'extra_settings' : {},
+        },
+        'delete_indices' : { 'master_timeout' : 30 },
+        'delete_snapshots' : {
+            'repository' : None,
+            'retry_interval' : 120,
+            'retry_count' : 3,
+        },
+        'forcemerge' : {
+            'delay' : 0,
+            'max_num_segments' : 2,
+        },
+        'open' : {},
+        'replicas' : {
+            'count' : None,
+            'wait_for_completion' : False,
+            'timeout' : 30,
+        },
+        'restore' : {
+            'repository' : None,
+            'name' : None,
+            'indices' : None,
+            'include_aliases' : False,
+            'ignore_unavailable' : False,
+            'include_global_state' : True,
+            'partial' : False,
+            'rename_pattern' : None,
+            'rename_replacement' : None,
+            'extra_settings' : {},
+            'wait_for_completion' : True,
+            'skip_repo_fs_check' : False,
+        },
+        'snapshot' : {
+            'repository' : None,
+            'name' : 'curator-%Y%m%d%H%M%S',
+            'ignore_unavailable' : False,
+            'include_global_state' : True,
+            'partial' : False,
+            'wait_for_completion' : True,
+            'skip_repo_fs_check' : False,
+        },
+    }
+
+def index_filter():
+    return {
+        'age': {
+            'source':'name', 'direction':None, 'timestring':None, 'unit':None,
+            'unit_count':None, 'field':None, 'stats_result':'min_value',
+            'epoch':None, 'exclude':False
+        },
+        'allocated': {
+            'key':None, 'value':None, 'allocation_type':'require', 'exclude':True
+        },
+        'closed': {'exclude':True},
+        'forcemerged': {'max_num_segments':None, 'exclude':True},
+        'kibana': {'exclude':True},
+        'none': {},
+        'opened': {'exclude':True},
+        'pattern': {'kind':None, 'value':None, 'exclude':False},
+        'space': {
+            'disk_space':None, 'reverse':True, 'use_age':False,
+            'source':'creation_date', 'timestring':None, 'field':None,
+            'stats_result':'min_value', 'exclude':False,
+        },
+    }
+
+def snapshot_filter():
+    return {
+        'age': {
+            'source':'creation_date', 'direction':None, 'timestring':None,
+            'unit':None, 'unit_count':None, 'epoch':None, 'exclude':False
+        },
+        'none': {},
+        'pattern': {'kind':None, 'value':None, 'exclude':False},
+        'state': {'state':'SUCCESS', 'exclude':False}
+    }
diff --git a/curator/indexlist.py b/curator/indexlist.py
index 1aa37cf..180a445 100644
--- a/curator/indexlist.py
+++ b/curator/indexlist.py
@@ -1,10 +1,9 @@
-
 from datetime import timedelta, datetime, date
 import time
 import re
 import logging
+from .defaults import settings
 from .exceptions import *
-from .settings import *
 from .utils import *
 
 
@@ -31,11 +30,11 @@ class IndexList(object):
         self.__get_indices()
 
     def __actionable(self, idx):
-        self.loggit.info(
+        self.loggit.debug(
             'Index {0} is actionable and remains in the list.'.format(idx))
 
     def __not_actionable(self, idx):
-            self.loggit.info(
+            self.loggit.debug(
                 'Index {0} is not actionable, removing from list.'.format(idx))
             self.indices.remove(idx)
 
@@ -55,7 +54,7 @@ class IndexList(object):
                 text = "Removed from actionable list"
                 self.__not_actionable(index)
         if msg:
-            self.loggit.info('{0}: {1}'.format(text, msg))
+            self.loggit.debug('{0}: {1}'.format(text, msg))
 
     def __get_indices(self):
         """
@@ -238,7 +237,7 @@ class IndexList(object):
             elasticsearch as a date datatype.  Default: ``@timestamp``
         """
         self.loggit.debug('Getting index date from field_stats API')
-        self.loggit.info(
+        self.loggit.debug(
             'Cannot use field_stats on closed indices.  '
             'Omitting any closed indices.'
         )
@@ -292,9 +291,9 @@ class IndexList(object):
             )
 
         if kind == 'timestring':
-            regex = REGEX_MAP[kind].format(get_date_regex(value))
+            regex = settings.regex_map()[kind].format(get_date_regex(value))
         else:
-            regex = REGEX_MAP[kind].format(value)
+            regex = settings.regex_map()[kind].format(value)
 
         self.empty_list_check()
         pattern = re.compile(regex)
@@ -393,7 +392,7 @@ class IndexList(object):
                     agetest = self.index_info[index]['age'][keyfield] > PoR
                 self.__excludify(agetest, exclude, index, msg)
             except KeyError:
-                self.loggit.info(
+                self.loggit.debug(
                     'Index "{0}" does not meet provided criteria. '
                     'Removing from list.'.format(index, source))
                 self.indices.remove(index)
@@ -449,7 +448,7 @@ class IndexList(object):
         disk_usage = 0.0
         disk_limit = disk_space * 2**30
 
-        self.loggit.info(
+        self.loggit.debug(
             'Cannot get disk usage info from closed indices.  '
             'Omitting any closed indices.'
         )
@@ -491,7 +490,14 @@ class IndexList(object):
             # as the key and value, respectively
             intermediate = {}
             for index in working_list:
-                intermediate[index] = self.index_info[index]['age'][keyfield]
+                if keyfield in self.index_info[index]['age']:
+                    intermediate[index] = self.index_info[index]['age'][keyfield]
+                else:
+                    msg = (
+                        '{0} does not have age key "{1}" in IndexList '
+                        ' metadata'.format(index, keyfield)
+                    )
+                    self.__excludify(True, True, index, msg)
 
             # This will sort the indices the youngest first. Effectively, this
             # should set us up to delete everything older than fits into
@@ -514,21 +520,6 @@ class IndexList(object):
                 )
             )
             self.__excludify((disk_usage > disk_limit), exclude, index, msg)
-            # if disk_usage > disk_limit:
-            #     if exclude:
-            #         text = "Removed from actionable list"
-            #         self.__not_actionable(index)
-            #     else:
-            #         text = "Remains in actionable list"
-            #         self.__actionable(index)
-            # else:
-            #     if exclude:
-            #         text = "Remains in actionable list"
-            #         self.__actionable(index)
-            #     else:
-            #         text = "Removed from actionable list"
-            #         self.__not_actionable(index)
-            # self.loggit.info('{0}: {1}'.format(text, msg))
 
     def filter_kibana(self, exclude=True):
         """
@@ -562,7 +553,7 @@ class IndexList(object):
         self.loggit.debug('Filtering forceMerged indices')
         if not max_num_segments:
             raise MissingArgument('Missing value for "max_num_segments"')
-        self.loggit.info(
+        self.loggit.debug(
             'Cannot get segment count of closed indices.  '
             'Omitting any closed indices.'
         )
@@ -669,7 +660,7 @@ class IndexList(object):
                     self.__excludify(has_routing, exclude, index, msg)
 
     def filter_none(self):
-        self.loggit.info('"None" filter selected.  No filtering will be done.')
+        self.loggit.debug('"None" filter selected.  No filtering will be done.')
 
     def iterate_filters(self, filter_dict):
         """
@@ -715,7 +706,7 @@ class IndexList(object):
                     '{0}'.format(f['filtertype'])
                 )
             try:
-                f_args = IDX_FILTER_DEFAULTS[ft]
+                f_args = settings.index_filter()[ft]
                 method = self.__map_method(ft)
             except:
                 raise ConfigurationError(
diff --git a/curator/logtools.py b/curator/logtools.py
index 0984df4..a23af43 100644
--- a/curator/logtools.py
+++ b/curator/logtools.py
@@ -32,6 +32,10 @@ class Whitelist(logging.Filter):
     def filter(self, record):
         return any(f.filter(record) for f in self.whitelist)
 
+class Blacklist(Whitelist):
+    def filter(self, record):
+        return not Whitelist.filter(self, record)
+
 class LogInfo(object):
     def __init__(self, cfg):
         cfg['loglevel'] = 'INFO' if not 'loglevel' in cfg else cfg['loglevel']
@@ -51,19 +55,6 @@ class LogInfo(object):
                 '%(asctime)s %(levelname)-9s %(name)22s '
                 '%(funcName)22s:%(lineno)-4d %(message)s'
             )
-        else:
-            for handler in logging.root.handlers:
-                self.handler.addFilter(
-                    Whitelist(
-                        'root', '__main__', 'curator.cli', 'curator.curator',
-                        'curator.indexlist', 'curator.snapshotlist',
-                        'curator.actions.alias', 'curator.actions.allocation',
-                        'curator.actions.close', 'curator.actions.delete_indices',
-                        'curator.actions.delete_snapshots',
-                        'curator.actions.forcemerge', 'curator.actions.open',
-                        'curator.actions.replicas', 'curator.actions.snapshot',
-                    )
-                )
 
         if cfg['logformat'] == 'json' or cfg['logformat'] == 'logstash':
             self.handler.setFormatter(LogstashFormatter())
diff --git a/curator/repomgrcli.py b/curator/repomgrcli.py
index 8d34ab3..9aaf242 100644
--- a/curator/repomgrcli.py
+++ b/curator/repomgrcli.py
@@ -3,13 +3,13 @@ import click
 import re
 import sys
 import logging
-from .settings import CLIENT_DEFAULTS, LOGGING_DEFAULTS
+from .defaults import settings
 from .exceptions import *
 from .utils import *
 from ._version import __version__
 from .logtools import LogInfo
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger('curator.repomgrcli')
 
 try:
     from logging import NullHandler
@@ -24,11 +24,6 @@ def delete_callback(ctx, param, value):
     if not value:
         ctx.abort()
 
-def fix_hosts(ctx):
-    if "host" in ctx.parent.params:
-        ctx.parent.params['hosts'] = ctx.parent.params['host']
-        del ctx.parent.params['host']
-
 def show_repos(client):
     for repository in sorted(get_repository(client, '_all').keys()):
         print('{0}'.format(repository))
@@ -55,7 +50,6 @@ def fs(
     """
     Create a filesystem repository.
     """
-    fix_hosts(ctx)
     client = get_client(**ctx.parent.parent.params)
     try:
         create_repository(client, repo_type='fs', **ctx.params)
@@ -91,7 +85,6 @@ def s3(
     """
     Create an S3 repository.
     """
-    fix_hosts(ctx)
     client = get_client(**ctx.parent.parent.params)
     try:
         create_repository(client, repo_type='s3', **ctx.params)
@@ -102,24 +95,24 @@ def s3(
 
 @click.group()
 @click.option(
-    '--host', help='Elasticsearch host.', default=CLIENT_DEFAULTS['hosts'])
+    '--host', help='Elasticsearch host.', default=settings.client()['hosts'])
 @click.option(
     '--url_prefix', help='Elasticsearch http url prefix.',
-    default=CLIENT_DEFAULTS['url_prefix']
+    default=settings.client()['url_prefix']
 )
-@click.option('--port', help='Elasticsearch port.', default=CLIENT_DEFAULTS['port'], type=int)
-@click.option('--use_ssl', help='Connect to Elasticsearch through SSL.', is_flag=True, default=CLIENT_DEFAULTS['use_ssl'])
+@click.option('--port', help='Elasticsearch port.', default=settings.client()['port'], type=int)
+@click.option('--use_ssl', help='Connect to Elasticsearch through SSL.', is_flag=True, default=settings.client()['use_ssl'])
 @click.option('--certificate', help='Path to certificate to use for SSL validation. (OPTIONAL)', type=str, default=None)
 @click.option('--client-cert', help='Path to file containing SSL certificate for client auth. (OPTIONAL)', type=str, default=None)
 @click.option('--client-key', help='Path to file containing SSL key for client auth. (OPTIONAL)', type=str, default=None)
 @click.option('--ssl-no-validate', help='Do not validate server\'s SSL certificate', is_flag=True)
-@click.option('--http_auth', help='Use Basic Authentication ex: user:pass', default=CLIENT_DEFAULTS['http_auth'])
-@click.option('--timeout', help='Connection timeout in seconds.', default=CLIENT_DEFAULTS['timeout'], type=int)
+@click.option('--http_auth', help='Use Basic Authentication ex: user:pass', default=settings.client()['http_auth'])
+@click.option('--timeout', help='Connection timeout in seconds.', default=settings.client()['timeout'], type=int)
 @click.option('--master-only', is_flag=True, help='Only operate on elected master node.')
 @click.option('--debug', is_flag=True, help='Debug mode')
-@click.option('--loglevel', help='Log level', default=LOGGING_DEFAULTS['loglevel'])
-@click.option('--logfile', help='log file', default=LOGGING_DEFAULTS['logfile'])
-@click.option('--logformat', help='Log output format [default|logstash].', default=LOGGING_DEFAULTS['logformat'])
+@click.option('--loglevel', help='Log level', default=settings.logs()['loglevel'])
+@click.option('--logfile', help='log file', default=settings.logs()['logfile'])
+@click.option('--logformat', help='Log output format [default|logstash].', default=settings.logs()['logformat'])
 @click.version_option(version=__version__)
 @click.pass_context
 def repo_mgr_cli(
@@ -153,7 +146,6 @@ def show(ctx):
     """
     Show all repositories
     """
-    fix_hosts(ctx)
     client = get_client(**ctx.parent.params)
     show_repos(client)
 
@@ -165,7 +157,6 @@ def show(ctx):
 @click.pass_context
 def _delete(ctx, repository):
     """Delete an Elasticsearch repository"""
-    fix_hosts(ctx)
     client = get_client(**ctx.parent.params)
     try:
         logger.info('Deleting repository {0}...'.format(repository))
diff --git a/curator/settings.py b/curator/settings.py
deleted file mode 100644
index c8289c5..0000000
--- a/curator/settings.py
+++ /dev/null
@@ -1,148 +0,0 @@
-import os
-
-# Elasticsearch versions supported
-VERSION_MAX  = (5, 1, 0)
-VERSION_MIN = (2, 0, 0)
-
-CONFIG_FILE = os.path.join(os.path.expanduser('~'), '.curator', 'curator.yml')
-
-CLIENT_DEFAULTS = {
-    'hosts': '127.0.0.1',
-    'port': 9200,
-    'url_prefix': '',
-    'http_auth': None,
-    'use_ssl': False,
-    'certificate': None,
-    'client_cert': None,
-    'client_key': None,
-    'aws_key': None,
-    'aws_secret_key': None,
-    'aws_region': None,
-    'ssl_no_validate': False,
-    'timeout': 30,
-    'master_only': False,
-}
-
-LOGGING_DEFAULTS = {
-    'loglevel': 'INFO',
-    'logfile': None,
-    'logformat': 'default',
-}
-
-OPTION_DEFAULTS = {
-    'ignore_empty_list': False,
-    'timeout_override': None,
-    'continue_if_exception': False,
-    'disable_action': False,
-}
-
-REGEX_MAP = {
-    'timestring': r'^.*{0}.*$',
-    'regex': r'{0}',
-    'prefix': r'^{0}.*$',
-    'suffix': r'^.*{0}$',
-}
-
-DATE_REGEX = {
-    'Y' : '4',
-    'y' : '2',
-    'm' : '2',
-    'W' : '2',
-    'U' : '2',
-    'd' : '2',
-    'H' : '2',
-    'M' : '2',
-    'S' : '2',
-    'j' : '3',
-}
-
-ACTION_DEFAULTS = {
-    'alias' : {
-        'name' : None,
-        'extra_settings' : {},
-    },
-    'allocation' : {
-        'key' : None,
-        'value' : None,
-        'allocation_type' : 'require',
-        'wait_for_completion' : False,
-        'timeout' : 30,
-    },
-    'close' : { 'delete_aliases' : False },
-    'create_index' : {
-        'name' : None,
-        'extra_settings' : {},
-    },
-    'delete_indices' : { 'master_timeout' : 30 },
-    'delete_snapshots' : {
-        'repository' : None,
-        'retry_interval' : 120,
-        'retry_count' : 3,
-    },
-    'forcemerge' : {
-        'delay' : 0,
-        'max_num_segments' : 2,
-    },
-    'open' : {},
-    'replicas' : {
-        'count' : None,
-        'wait_for_completion' : False,
-        'timeout' : 30,
-    },
-    'restore' : {
-        'repository' : None,
-        'name' : None,
-        'indices' : None,
-        'include_aliases' : False,
-        'ignore_unavailable' : False,
-        'include_global_state' : True,
-        'partial' : False,
-        'rename_pattern' : None,
-        'rename_replacement' : None,
-        'extra_settings' : {},
-        'wait_for_completion' : True,
-        'skip_repo_fs_check' : False,
-    },
-    'snapshot' : {
-        'repository' : None,
-        'name' : 'curator-%Y%m%d%H%M%S',
-        'ignore_unavailable' : False,
-        'include_global_state' : True,
-        'partial' : False,
-        'wait_for_completion' : True,
-        'skip_repo_fs_check' : False,
-    },
-}
-
-IDX_FILTER_DEFAULTS = {
-
-    'age': {
-        'source':'name', 'direction':None, 'timestring':None, 'unit':None,
-        'unit_count':None, 'field':None, 'stats_result':'min_value',
-        'epoch':None, 'exclude':False
-    },
-    'allocated': {
-        'key':None, 'value':None, 'allocation_type':'require', 'exclude':True
-    },
-    'closed': {'exclude':True},
-    'forcemerged': {'max_num_segments':None, 'exclude':True},
-    'kibana': {'exclude':True},
-    'none': {},
-    'opened': {'exclude':True},
-    'pattern': {'kind':None, 'value':None, 'exclude':False},
-    'space': {
-        'disk_space':None, 'reverse':True, 'use_age':False,
-        'source':'creation_date', 'timestring':None, 'field':None,
-        'stats_result':'min_value', 'exclude':False,
-    },
-}
-
-SNAP_FILTER_DEFAULTS = {
-    'age': {
-        'source':'creation_date', 'direction':None, 'timestring':None,
-        'unit':None, 'unit_count':None, 'epoch':None, 'exclude':False
-    },
-    'none': {},
-    'pattern': {'kind':None, 'value':None, 'exclude':False},
-    'state': {'state':'SUCCESS', 'exclude':False}
-}
diff --git a/curator/snapshotlist.py b/curator/snapshotlist.py
index 3632f6c..051bac3 100644
--- a/curator/snapshotlist.py
+++ b/curator/snapshotlist.py
@@ -1,10 +1,11 @@
-from .exceptions import *
-from .utils import *
 from datetime import timedelta, datetime, date
 import time
 import re
-from .settings import *
 import logging
+from .defaults import settings
+from .exceptions import *
+from .utils import *
+
 
 class SnapshotList(object):
     def __init__(self, client, repository=None):
@@ -40,11 +41,11 @@ class SnapshotList(object):
 
 
... 1213 lines suppressed ...

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/elasticsearch-curator.git



More information about the Python-modules-commits mailing list