diff --git a/.rubocop.yml b/.rubocop.yml index 874ad9b..d8afbab 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -38,4 +38,10 @@ MethodLength: Max: 50 SingleSpaceBeforeFirstArg: + Enabled: false + +Style/WordArray: + MinSize: 5 + +Style/RedundantBegin: Enabled: false \ No newline at end of file diff --git a/Berksfile b/Berksfile index 305b85c..14e4f69 100644 --- a/Berksfile +++ b/Berksfile @@ -4,14 +4,8 @@ source 'https://api.berkshelf.com' if Gem::Version.new(Berkshelf::VERSION) > Gem metadata -cookbook 'java' -cookbook 'curl' -cookbook 'ark' - -cookbook 'pleaserun', git: 'https://github.com/paulczar/chef-pleaserun.git' - group :test do - cookbook 'minitest-handler', git: 'https://github.com/btm/minitest-handler-cookbook.git' cookbook 'elasticsearch', git: 'https://github.com/elasticsearch/cookbook-elasticsearch.git' cookbook 'kibana', git: 'https://github.com/lusis/chef-kibana.git' + cookbook 'beaver' end diff --git a/CHANGELOG.md b/CHANGELOG.md index 7186b4b..28551c2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # CHANGELOG for chef-logstash +## 0.11.0 +* MAJOR - depreciate non runit service types. +* MINOR - fix bug where node['logstash'][instance_name] must exist. +* MAJOR - remove pyshipper in favor of beaver community cookbook. +* MAJOR - remove beaver in favor of community cookbook. +* MAJOR - assumes ChefDK for Development/Testing +* MAJOR - use keys from config_template hash to make templates reusable. + ## 0.10.0: * major rework of service LWRP * rework of attribute precidence @@ -24,7 +32,7 @@ _this will almost certainly break backwards compatibility_ * instance LWRP * service LWRP * pattern LWP - * config LWP + * config LWP ## 0.7.7: diff --git a/README.md b/README.md index 4662e79..9fda573 100644 --- a/README.md +++ b/README.md @@ -5,9 +5,9 @@ Description This is the semi-official 'all-in-one' Logstash cookbook. -This cookbook is in transition from being a regular cookbook to following the Library Cookbook pattern. 
-While you can still use the `agent` and `server` recipes, they are only used for testing and don't supply attributes -when they call the LWRPs within. The power of this cookbook now comes from the `LWRPs` being used directly. +This cookbook is primarily a library cookbook. + +While you can still use the `agent` and `server` recipes, they are not recommended as they are very limited in what they do. If you are using logstash < 1.2 you might want to use the 0.6.x branch. If you are using logstash < 1.4 you might want to use the 0.7.x branch. @@ -16,7 +16,7 @@ Requirements ============ All of the requirements are explicitly defined in the recipes. Every -effort has been made to utilize Opscode's cookbooks. +effort has been made to utilize Community Cookbooks. However if you wish to use an external ElasticSearch cluster, you will need to install that yourself and change the relevant attributes for @@ -27,8 +27,9 @@ see the Berksfile for more details * [Heavywater Graphite Cookbook](https://github.com/hw-cookbooks/graphite) - This is the one I use * [Karmi's ElasticSearch Cookbook](https://github.com/elasticsearch/cookbook-elasticsearch) -* [RiotGames RBENV cookbook](https://github.com/RiotGames/rbenv-cookbook) * [@lusis Kibana cookbook](https://github.com/lusis/chef-kibana) +* [Community Beaver cookbook](https://supermarket.getchef.com/cookbooks/beaver) +* [elkstack community cookbook](https://supermarket.getchef.com/cookbooks/elkstack) Attributes ========== @@ -39,64 +40,13 @@ see [attributes/default.rb](attributes/default.rb) ## Beaver (alternative to Logstash Agent) -_This will be deprecated soon in favor of an external library cookbook._ - -* `node['logstash']['beaver']['repo']` - URL or repository to install - beaver from (using pip). -* `node['logstash']['beaver']['server_role']` - The role of the node - behaving as a Logstash `server`/`indexer`. -* `node['logstash']['beaver']['server_ipaddress']` - Server IP address - to use (needed when not using server_role). 
-* `node['logstash']['beaver']['inputs']` - Array of input plugins - configuration (Supported: file). - For example: - -```ruby -override['logstash']['beaver']['inputs'] = [ - { :file => - { - :path => ["/var/log/nginx/*log"], - :type => "nginx", - :tags => ["logstash","nginx"] - } - }, - { :file => - { - :path => ["/var/log/syslog"], - :type => "syslog", - :tags => ["logstash","syslog"] - } - } -] -``` -* `node['logstash']['beaver']['outputs']` - Array of output plugins - configuration (Supported: amq, redis, stdout, zeromq). - For example: - -```ruby -override['logstash']['beaver']['outputs'] = [ - { - :amqp => { - :port => "5672", - :exchange => "rawlogs", - :name => "rawlogs_consumer" - } - } -] -``` - This example sets up the amqp output and uses the recipe defaults for the host value +no longer used. see [Community Beaver cookbook](https://supermarket.getchef.com/cookbooks/beaver) ## Source -* `node['logstash']['source']['repo']` - The git repo to use for the - source code of Logstash -* `node['logstash']['source']['sha']` - The sha/branch/tag of the repo - you wish to clone. Uses `node['logstash']['server']['version']` by - default. -* `node['logstash']['source']['java_home']` - your `JAVA_HOME` - location. Needed explicity for `ant` when building JRuby +no longer supports installing from source. -## Index Cleaner +## Index Cleaner / Curator * `node['logstash']['index_cleaner']['days_to_keep']` - Integer number of days from today of Logstash index to keep. @@ -148,10 +98,12 @@ see [resources/plugins.rb](resources/plugins.rb) We've done our best to make this intuitive and easy to use. -1. the value directly in the resource call. +1. the value directly in the resource block. 2. the value from the hash node['logstash']['instance'][name] 3. the value from the hash node['logstash']['instance_default'] +You should be able to override settings in any of the above places. It is recommended for readability that you set non-default options in the LWRP resource block. 
But do whichever makes sense to you. + Searching ====== @@ -163,20 +115,30 @@ Testing ## Vagrant +__deprecated in favor of test kitchen.__ + ``` -vagrant up precise64 +$ vagrant up precise64 ``` ## Rubocop, FoodCritic, Rspec, Test-Kitchen ``` -bundle exec rake +$ bundle exec rake +``` + +## Test Kitchen + +``` +$ kitchen converge server_ubuntu ``` Contributing ======== -Any and all contributions are welcome. We do ask that you test your contributions with the testing framework before you send a PR. +Any and all contributions are welcome. We do ask that you test your contributions with the testing framework before you send a PR. All contributions should be made against the development branch. We will merge in from development to master and do versioning etc then depending on the scope of the changes being pulled in. + +Please update tests and the changelog with your contributions. Documentation contributions will earn you lots of hugs and kisses. @@ -190,334 +152,7 @@ These two recipes show how to install and configure logstash instances via the p * [recipes/server.rb](recipes/server.rb) - This would be your indexer node * [recipes/agent.rb](recipes/agent.rb) - This would be a local host's agent for collection - -Every attempt (and I mean this) was made to ensure that the following -objectives were met: - -* Any agent install can talk to a server install -* Kibana web interface can talk to the server install -* Each component works OOB and with each other -* Utilize official opscode cookbooks where possible - -This setup makes HEAVY use of roles. Additionally, ALL paths have been -made into attributes. Everything I could think of that would need to -be customized has been made an attribute. 
- -## Defaults - -By default, the recipes look for the following roles (defined as -attributes so they can be overridden): - -* `graphite_server` - `node['logstash']['graphite_role']` -* `elasticsearch_server` - `node['logstash']['elasticsearch_role']` -* `logstash_server` - - `node['logstash']['kibana']['elasticsearch_role']` and - `node['logstash']['agent']['server_role']` - -The reason for giving `kibana` its own role assignment is to allow you -to point to existing ES clusters/logstash installs. - -The reason for giving `agent` its own role assignment is to allow the -`server` and `agent` recipes to work together. - -Yes, if you have a graphite installation with a role of -`graphite_server`, logstash will send stats of events received to -`logstash.events`. - -## Agent and Server configuration - -The template to use for configuration is made an attribute as well. -This allows you to define your OWN logstash configuration file without -mucking with the default templates. - -The `server` will, by default, enable the embedded ES server. This can -be overriden as well. - -See the `server` and `agent` attributes for more details. - -## Source vs. Jar install methods - -Both `agent` and `server` support an attribute for how to install. By -default this is set to `jar` to use the 1.1.1preview as it is required -to use elasticsearch 0.19.4. The current release is defined in -attributes if you choose to go the `source` route. - -## Out of the box behaviour - -Here are some basic steps - -* Create a role called `logstash_server` and assign it the following - recipes: `logstash::server` -* Assign the role to a new server -* Assign the `logstash::agent` recipe to another server - -If there is a system found with the `logstash_server` role, the agent -will automatically configure itself to send logs to it over tcp port -5959. This is, not coincidently, the port used by the chef logstash -handler. 
- -If there is NOT a system with the `logstash_server` role, the agent -will use a null output. The default input is to read files from -`/var/log/*.log` excluding and gzipped files. - -If you point your browser to the `logstash_server` system's ip -address, you should get the kibana web interface. - -Do something to generate a new line in any of the files in the agent's -watch path (I like to SSH to the host), and the events will start -showing up in kibana. You might have to issue a fresh empty search. - -The `pyshipper` recipe will work as well but it is NOT wired up to -anything yet. - -## config templates - -If you want to use chef templates to drive your configs you'll want to set the following: - -* example using `agent`, `server` works the same way. -* The actual template file for the following would resolve to `templates/default/apache.conf.erb` and be installed to `/opt/logstash/agent/etc/conf.d/apache.conf` -* Each template has a hash named for it to inject variables in `node['logstash']['agent']['config_templates_variables']` - - -``` -node.set['logstash']['agent']['config_file'] = "" # disable data drive templates ( can be left enabled if want both ) -node.set['logstash']['agent']['config_templates'] = { "apache" => "apache.conf.erb" } -node.set['logstash']['agent']['config_templates_cookbook'] = 'logstash' -node.set['logstash']['agent']['config_templates_variables'] = { apache: { type: 'apache' } } -``` - - - - -## Letting data drive your templates - -*DEPRECATED!* - -While this may work ... it is no longer being actively supported by the maintainers of this cookbook. -We will accept `PRs`. - -The current templates for the agent and server are written so that you -can provide ruby hashes in your roles that map to inputs, filters, and -outputs. Here is a role for logstash_server. - -There are two formats for the hashes for filters and outputs that you should be aware of ... 
- -### Legacy - -This is for logstash < 1.2.0 and uses the old pattern of setting 'type' and 'tags' in the plugin to determine if it should be run. - -```json -filters: [ - grok: { - type: "syslog" - match: [ - "message", - "%{SYSLOGTIMESTAMP:timestamp} %{IPORHOST:host} (?:%{PROG:program}(?:\[%{POSINT:pid}\])?: )?%{GREEDYDATA:message}" - ] - }, - date: { - type: "syslog" - match: [ - "timestamp", - "MMM d HH:mm:ss", - "MMM dd HH:mm:ss", - "ISO8601" - ] - } -] -``` - -### Conditional - -This is for logstash >= 1.2.0 and uses the new pattern of conditionals `if 'type' == "foo" {}` - -Note: the condition applies to all plugins in the block hash in the same object. - -```json -filters: [ - { - condition: 'if [type] == "syslog"', - block: { - grok: { - match: [ - "message", - "%{SYSLOGTIMESTAMP:timestamp} %{IPORHOST:host} (?:%{PROG:program}(?:\[%{POSINT:pid}\])?: )?%{GREEDYDATA:message}" - ] - }, - date: { - match: [ - "timestamp", - "MMM d HH:mm:ss", - "MMM dd HH:mm:ss", - "ISO8601" - ] - } - } - } -] -``` - -### Examples - -These examples show the legacy format and need to be updated for logstash >= 1.2.0 - - name "logstash_server" - description "Attributes and run_lists specific to FAO's logstash instance" - default_attributes( - :logstash => { - :server => { - :enable_embedded_es => false, - :inputs => [ - :rabbitmq => { - :type => "all", - :host => "", - :exchange => "rawlogs" - } - ], - :filters => [ - :grok => { - :type => "haproxy", - :pattern => "%{HAPROXYHTTP}", - :patterns_dir => '/opt/logstash/server/etc/patterns/' - } - ], - :outputs => [ - :file => { - :type => 'haproxy', - :path => '/opt/logstash/server/haproxy_logs/%{request_header_host}.log', - :message_format => '%{client_ip} - - [%{accept_date}] "%{http_request}" %{http_status_code} ....' 
- } - ] - } - } - ) - run_list( - "role[elasticsearch_server]", - "recipe[logstash::server]" - ) - - -It will produce the following logstash.conf file - - input { - - amqp { - exchange => 'rawlogs' - host => '' - name => 'rawlogs_consumer' - type => 'all' - } - } - - filter { - - grok { - pattern => '%{HAPROXYHTTP}' - patterns_dir => '/opt/logstash/server/etc/patterns/' - type => 'haproxy' - } - } - - output { - stdout { debug => true debug_format => "json" } - elasticsearch { host => "127.0.0.1" cluster => "logstash" } - - file { - message_format => '%{client_ip} - - [%{accept_date}] "%{http_request}" %{http_status_code} ....' - path => '/opt/logstash/server/haproxy_logs/%{request_header_host}.log' - type => 'haproxy' - } - } - -Here is an example using multiple filters - - default_attributes( - :logstash => { - :server => { - :filters => [ - { :grep => { - :type => 'tomcat', - :match => { '@message' => '([Ee]xception|Failure:|Error:)' }, - :add_tag => 'exception', - :drop => false - } }, - { :grep => { - :type => 'tomcat', - :match => { '@message' => 'Unloading class ' }, - :add_tag => 'unloading-class', - :drop => false - } }, - { :multiline => { - :type => 'tomcat', - :pattern => '^\s', - :what => 'previous' - } } - ] - } - } - ) - -It will produce the following logstash.conf file - - filter { - - grep { - add_tag => 'exception' - drop => false - match => ['@message', '([Ee]xception|Failure:|Error:)'] - type => 'tomcat' - } - - grep { - add_tag => 'unloading-class' - drop => false - match => ["@message", "Unloading class "] - type => 'tomcat' - } - - multiline { - patterns_dir => '/opt/logstash/patterns' - pattern => '^\s' - type => 'tomcat' - what => 'previous' - } - - } - -## Adding grok patterns - -Grok pattern files can be generated using attributes as follows - - default_attributes( - :logstash => { - :patterns => { - :apache => { - :HTTP_ERROR_DATE => '%{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}', - :APACHE_LOG_LEVEL => '[A-Za-z][A-Za-z]+', - 
:ERRORAPACHELOG => '^\[%{HTTP_ERROR_DATE:timestamp}\] \[%{APACHE_LOG_LEVEL:level}\](?: \[client %{IPORHOST:clientip}\])?', - }, - :mywebapp => { - :MYWEBAPP_LOG => '\[mywebapp\]', - }, - }, - [...] - } - ) - -This will generate the following files: - -`/opt/logstash/server/etc/patterns/apache` - - APACHE_LOG_LEVEL [A-Za-z][A-Za-z]+ - ERRORAPACHELOG ^\[%{HTTP_ERROR_DATE:timestamp}\] \[%{APACHE_LOG_LEVEL:level}\](?: \[client %{IPORHOST:clientip}\])? - HTTP_ERROR_DATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR} - -`/opt/logstash/server/etc/patterns/mywebapp` - - MYWEBAPP_LOG \[mywebapp\] - -This patterns will be included by default in the grok and multiline -filters. +See the [elkstack community cookbook](https://supermarket.getchef.com/cookbooks/elkstack) for a great example of using the LWRPs provided by this cookbook. # Vagrant @@ -540,10 +175,6 @@ Run Logstash on Centos 6 32bit : `vagrant up centos6_32` Logstash will listen for syslog messages on tcp/5140 -# BIG WARNING - -* Currently only tested on Ubuntu Natty, Precise, and RHEL 6.2. - ## License and Author - Author: John E. 
Vincent diff --git a/Rakefile b/Rakefile index c649ea3..50610f2 100644 --- a/Rakefile +++ b/Rakefile @@ -28,7 +28,7 @@ end require 'rspec/core/rake_task' desc 'Run ChefSpec unit tests' RSpec::Core::RakeTask.new(:spec) do |t, _args| - t.rspec_opts = 'test/unit/spec' + t.rspec_opts = 'test/unit' end # The default rake task should just run it all diff --git a/attributes/beaver.rb b/attributes/beaver.rb deleted file mode 100644 index f1dbcce..0000000 --- a/attributes/beaver.rb +++ /dev/null @@ -1,13 +0,0 @@ -# Encoding: utf-8 -default['logstash']['beaver']['log_file'] = '/var/log/logstash/beaver.log' -default['logstash']['beaver']['pip_package'] = 'beaver==22' -default['logstash']['beaver']['pika']['pip_package'] = 'pika==0.9.8' -default['logstash']['beaver']['zmq']['pip_package'] = 'pyzmq==2.1.11' -default['logstash']['beaver']['server_role'] = 'logstash_server' -default['logstash']['beaver']['server_ipaddress'] = nil -default['logstash']['beaver']['inputs'] = [] -default['logstash']['beaver']['outputs'] = [] -default['logstash']['beaver']['format'] = 'json' - -default['logstash']['beaver']['logrotate']['options'] = %w(missingok notifempty compress copytruncate) -default['logstash']['beaver']['logrotate']['postrotate'] = 'invoke-rc.d logstash_beaver force-reload >/dev/null 2>&1 || true' diff --git a/attributes/default.rb b/attributes/default.rb index 92c9b79..8a27295 100644 --- a/attributes/default.rb +++ b/attributes/default.rb @@ -8,6 +8,7 @@ default['logstash']['instance_default']['elasticsearch_cluster'] = 'logstash' default['logstash']['instance_default']['elasticsearch_ip'] = '' default['logstash']['instance_default']['elasticsearch_port'] = '' +default['logstash']['instance_default']['elasticsearch_embedded'] = true default['logstash']['instance_default']['graphite_ip'] = '' # Default logstash instance variables @@ -24,7 +25,7 @@ default['logstash']['instance_default']['version'] = '1.4.1' default['logstash']['instance_default']['source_url'] = 
'https://download.elasticsearch.org/logstash/logstash/logstash-1.4.1.tar.gz' default['logstash']['instance_default']['checksum'] = 'a1db8eda3d8bf441430066c384578386601ae308ccabf5d723df33cee27304b4' -default['logstash']['instance_default']['install_type'] = 'tarball' +default['logstash']['instance_default']['install_type'] = 'tarball' # support for java was depreciated. default['logstash']['instance_default']['plugins_version'] = '1.4.1' default['logstash']['instance_default']['plugins_source_url'] = 'https://download.elasticsearch.org/logstash/logstash/logstash-contrib-1.4.1.tar.gz' @@ -54,9 +55,13 @@ default['logstash']['instance_default']['config_templates_cookbook'] = 'logstash' default['logstash']['instance_default']['config_templates_variables'] = {} -default['logstash']['instance_default']['init_method'] = 'native' # pleaserun or native or runit +default['logstash']['instance_default']['init_method'] = 'runit' default['logstash']['instance_default']['service_templates_cookbook'] = 'logstash' +# default locations for runit templates +default['logstash']['instance_default']['runit_run_template_name'] = 'logstash' +default['logstash']['instance_default']['runit_log_template_name'] = 'logstash' + # roles/flags for various autoconfig/discovery components default['logstash']['instance_default']['enable_embedded_es'] = false default['logstash']['instance_default']['bind_host_interface'] = '' @@ -79,7 +84,11 @@ default['logstash']['instance_default']['logrotate_use_filesize'] = false # Curator +default['logstash']['instance_default']['curator_bin_dir'] = '/usr/local/bin' default['logstash']['instance_default']['curator_days_to_keep'] = 31 default['logstash']['instance_default']['curator_cron_minute'] = '0' default['logstash']['instance_default']['curator_cron_hour'] = '*' default['logstash']['instance_default']['curator_cron_log_file'] = '/dev/null' + +# Make sure instance key exists +default['logstash']['instance'] diff --git a/attributes/pyshipper.rb 
b/attributes/pyshipper.rb deleted file mode 100644 index 31f46a1..0000000 --- a/attributes/pyshipper.rb +++ /dev/null @@ -1 +0,0 @@ -# Encoding: utf-8 diff --git a/files/default/logstash_index_cleaner.py b/files/default/logstash_index_cleaner.py deleted file mode 100644 index 64c9b6c..0000000 --- a/files/default/logstash_index_cleaner.py +++ /dev/null @@ -1,153 +0,0 @@ -#!/usr/bin/env python -# -# Deletes all indices with a datestamp older than "days-to-keep" for daily -# if you have hourly indices, it will delete all of those older than "hours-to-keep" -# -# This script presumes an index is named typically, e.g. logstash-YYYY.MM.DD -# It will work with any name-YYYY.MM.DD or name-YYYY.MM.DD.HH type sequence -# -# Requires python and the following dependencies (all pip/easy_installable): -# -# pyes (python elasticsearch bindings, which might need simplejson) -# argparse (built-in in python2.7 and higher, python 2.6 and lower will have to easy_install it) -# -# TODO: Proper logging instead of just print statements, being able to configure a decent logging level. -# Unit tests. The code is somewhat broken up into logical parts that may be tested separately. -# Better error reporting? -# Improve the get_index_epoch method to parse more date formats. Consider renaming (to "parse_date_to_timestamp"?) - -import sys -import time -import argparse -from datetime import timedelta - -import pyes - - -__version__ = '0.1.1' - - -def make_parser(): - """ Creates an ArgumentParser to parse the command line options. 
""" - parser = argparse.ArgumentParser(description='Delete old logstash indices from Elasticsearch.') - - parser.add_argument('-v', '--version', action='version', version='%(prog)s '+__version__) - - parser.add_argument('--host', help='Elasticsearch host.', default='localhost') - parser.add_argument('--port', help='Elasticsearch port', default=9200, type=int) - parser.add_argument('-t', '--timeout', help='Elasticsearch timeout', default=30, type=int) - - parser.add_argument('-p', '--prefix', help='Prefix for the indices. Indices that do not have this prefix are skipped.', default='logstash-') - parser.add_argument('-s', '--separator', help='Time unit separator', default='.') - - parser.add_argument('-H', '--hours-to-keep', action='store', help='Number of hours to keep.', type=int) - parser.add_argument('-d', '--days-to-keep', action='store', help='Number of days to keep.', type=int) - - parser.add_argument('-n', '--dry-run', action='store_true', help='If true, does not perform any changes to the Elasticsearch indices.', default=False) - - return parser - - -def get_index_epoch(index_timestamp, separator='.'): - """ Gets the epoch of the index. - - :param index_timestamp: A string on the format YYYY.MM.DD[.HH] - :return The creation time (epoch) of the index. - """ - year_month_day_optionalhour = index_timestamp.split(separator) - if len(year_month_day_optionalhour) == 3: - year_month_day_optionalhour.append('3') - - return time.mktime([int(part) for part in year_month_day_optionalhour] + [0,0,0,0,0]) - - -def find_expired_indices(connection, days_to_keep=None, hours_to_keep=None, separator='.', prefix='logstash-', out=sys.stdout, err=sys.stderr): - """ Generator that yields expired indices. - - :return: Yields tuples on the format ``(index_name, expired_by)`` where index_name - is the name of the expired index and expired_by is the number of seconds (a float value) that the - index was expired by. 
- """ - utc_now_time = time.time() + time.altzone - days_cutoff = utc_now_time - days_to_keep * 24 * 60 * 60 if days_to_keep is not None else None - hours_cutoff = utc_now_time - hours_to_keep * 60 * 60 if hours_to_keep is not None else None - - for index_name in sorted(set(connection.indices.get_indices().keys())): - if not index_name.startswith(prefix): - print >> out, 'Skipping index due to missing prefix {0}: {1}'.format(prefix, index_name) - continue - - unprefixed_index_name = index_name[len(prefix):] - - # find the timestamp parts (i.e ['2011', '01', '05'] from '2011.01.05') using the configured separator - parts = unprefixed_index_name.split(separator) - - # perform some basic validation - if len(parts) < 3 or len(parts) > 4 or not all([item.isdigit() for item in parts]): - print >> err, 'Could not find a valid timestamp from the index: {0}'.format(index_name) - continue - - # find the cutoff. if we have more than 3 parts in the timestamp, the timestamp includes the hours and we - # should compare it to the hours_cutoff, otherwise, we should use the days_cutoff - cutoff = hours_cutoff - if len(parts) == 3: - cutoff = days_cutoff - - # but the cutoff might be none, if the current index only has three parts (year.month.day) and we're only - # removing hourly indices: - if cutoff is None: - print >> out, 'Skipping {0} because it is of a type (hourly or daily) that I\'m not asked to delete.'.format(index_name) - continue - - index_epoch = get_index_epoch(unprefixed_index_name) - - # if the index is older than the cutoff - if index_epoch < cutoff: - yield index_name, cutoff-index_epoch - - else: - print >> out, '{0} is {1} above the cutoff.'.format(index_name, timedelta(seconds=index_epoch-cutoff)) - - -def main(): - start = time.time() - - parser = make_parser() - arguments = parser.parse_args() - - if not arguments.hours_to_keep and not arguments.days_to_keep: - print >> sys.stderr, 'Invalid arguments: You must specify either the number of hours or the number 
of days to keep.' - parser.print_help() - return - - connection = pyes.ES('{0}:{1}'.format(arguments.host, arguments.port), timeout=arguments.timeout) - - if arguments.days_to_keep: - print 'Deleting daily indices older than {0} days.'.format(arguments.days_to_keep) - if arguments.hours_to_keep: - print 'Deleting hourly indices older than {0} hours.'.format(arguments.hours_to_keep) - - print '' - - for index_name, expired_by in find_expired_indices(connection, arguments.days_to_keep, arguments.hours_to_keep, arguments.separator, arguments.prefix): - expiration = timedelta(seconds=expired_by) - - if arguments.dry_run: - print 'Would have attempted deleting index {0} because it is {1} older than the calculated cutoff.'.format(index_name, expiration) - continue - - print 'Deleting index {0} because it was {1} older than cutoff.'.format(index_name, expiration) - - deletion = connection.indices.delete_index_if_exists(index_name) - # ES returns a dict on the format {u'acknowledged': True, u'ok': True} on success. - if deletion.get('ok'): - print 'Successfully deleted index: {0}'.format(index_name) - else: - print 'Error deleting index: {0}. 
({1})'.format(index_name, deletion) - - print '' - print 'Done in {0}.'.format(timedelta(seconds=time.time()-start)) - - -if __name__ == '__main__': - main() diff --git a/libraries/logstash_util.rb b/libraries/logstash_util.rb index 8fd063d..2da0804 100644 --- a/libraries/logstash_util.rb +++ b/libraries/logstash_util.rb @@ -28,6 +28,7 @@ def self.get_ip_for_node(node, interface) end def self.get_attribute_or_default(node, instance_name, attribute_name) + instance_attr = {} unless node['logstash']['instance'][instance_name] instance_attr = deep_fetch(node, 'logstash', 'instance', instance_name, attribute_name) default_attr = deep_fetch(node, 'logstash', 'instance_default', attribute_name) instance_attr || default_attr diff --git a/metadata.rb b/metadata.rb index b36665d..0758f4b 100644 --- a/metadata.rb +++ b/metadata.rb @@ -5,16 +5,16 @@ license 'Apache 2.0' description 'Installs/Configures logstash' long_description IO.read(File.join(File.dirname(__FILE__), 'README.md')) -version '0.10.0' +version '0.11.0' %w(ubuntu debian redhat centos scientific amazon fedora).each do |os| supports os end -%w(build-essential runit git ant java logrotate python ark).each do |ckbk| +%w(build-essential runit git ant java logrotate python ark curl).each do |ckbk| depends ckbk end -%w(apt).each do |ckbk| +%w(apt elasticsearch beaver).each do |ckbk| recommends ckbk end diff --git a/providers/config.rb b/providers/config.rb index e7647dd..bcd7129 100644 --- a/providers/config.rb +++ b/providers/config.rb @@ -33,8 +33,8 @@ def load_current_resource action :create do conf = conf_vars # Chef::Log.info("config vars: #{conf.inspect}") - conf[:templates].each do |_template, file| - tp = template "#{conf[:path]}/#{::File.basename(file).chomp(::File.extname(file))}" do + conf[:templates].each do |template, file| + tp = template "#{conf[:path]}/#{::File.basename(template).chomp(::File.extname(template))}" do source file cookbook conf[:templates_cookbook] owner conf[:owner] diff --git 
a/providers/curator.rb b/providers/curator.rb index bebb074..28c7731 100644 --- a/providers/curator.rb +++ b/providers/curator.rb @@ -17,6 +17,7 @@ def load_current_resource @hour = new_resource.hour || Logstash.get_attribute_or_default(node, @instance, 'curator_cron_hour') @log_file = new_resource.log_file || Logstash.get_attribute_or_default(node, @instance, 'curator_cron_log_file') @user = new_resource.user || Logstash.get_attribute_or_default(node, @instance, 'user') + @bin_dir = new_resource.bin_dir || Logstash.get_attribute_or_default(node, @instance, 'curator_bin_dir') end action :create do @@ -26,6 +27,7 @@ def load_current_resource cur_hour = @hour cur_minute = @minute cur_user = @user + cur_bin_dir = @bin_dir @run_context.include_recipe 'python::pip' @@ -36,7 +38,7 @@ def load_current_resource server_ip = ::Logstash.service_ip(node, cur_instance, 'elasticsearch') cr = cron "curator-#{cur_instance}" do - command "curator --host #{server_ip} delete --older-than #{cur_days_to_keep} &> #{cur_log_file}" + command "#{cur_bin_dir}/curator --host #{server_ip} delete --older-than #{cur_days_to_keep} > #{cur_log_file} 2>&1" user cur_user minute cur_minute hour cur_hour @@ -52,16 +54,17 @@ def load_current_resource cur_hour = @hour cur_minute = @minute cur_user = @user + cur_bin_dir = @bin_dir @run_context.include_recipe 'python::pip' pi = python_pip 'elasticsearch-curator' do - action :install + action :uninstall end new_resource.updated_by_last_action(pi.updated_by_last_action?) 
cr = cron "curator-#{cur_instance}" do - command "curator --host #{::Logstash.service_ip(node, cur_instance, 'elasticsearch')} delete --older-than #{cur_days_to_keep} &> #{cur_log_file}" + command "#{cur_bin_dir}/curator --host #{::Logstash.service_ip(node, cur_instance, 'elasticsearch')} delete --older-than #{cur_days_to_keep} > #{cur_log_file} 2>&1" user cur_user minute cur_minute hour cur_hour diff --git a/providers/install.rb b/providers/install.rb deleted file mode 100644 index 690f72e..0000000 --- a/providers/install.rb +++ /dev/null @@ -1,232 +0,0 @@ -# Encoding: utf-8 -# Cookbook Name:: logstash -# Provider:: instance -# Author:: John E. Vincent -# License:: Apache 2.0 -# -# Copyright 2014, John E. Vincent - -require 'chef/mixin/shell_out' -require 'chef/mixin/language' -include Chef::Mixin::ShellOut - -def load_current_resource - @name = new_resource.name || 'default' - @base_directory = new_resource.base_directory || Logstash.get_attribute_or_default(node, @name, 'basedir') - @install_type = new_resource.install_type || Logstash.get_attribute_or_default(node, @name, 'install_type') - @version = new_resource.version || Logstash.get_attribute_or_default(node, @name, 'version') - @checksum = new_resource.checksum || Logstash.get_attribute_or_default(node, @name, 'checksum') - @source_url = new_resource.source_url || Logstash.get_attribute_or_default(node, @name, 'source_url') - @repo = new_resource.repo - @sha = new_resource.sha - @java_home = new_resource.java_home - @user = new_resource.user || Logstash.get_attribute_or_default(node, @name, 'user') - @group = new_resource.group || Logstash.get_attribute_or_default(node, @name, 'group') - @useropts = new_resource.user_opts || Logstash.get_attribute_or_default(node, @name, 'user_opts') - @install_dir = "#{@base_directory}/application/logstash-#{@version}".clone -end - -action :delete do - ls = ls_vars - - idr = directory ls[:install_dir] do - recursive true - action :delete - end - 
new_resource.updated_by_last_action(idr.updated_by_last_action?) -end - -action :create do - ls = ls_vars - - ur = user ls[:user] do - home ls[:homedir] - system true - action :create - manage_home true - uid ls[:uid] - end - new_resource.updated_by_last_action(ur.updated_by_last_action?) - - gr = group ls[:group] do - gid ls[:gid] - members ls[:user] - append true - system true - end - new_resource.updated_by_last_action(gr.updated_by_last_action?) - - case @install_type - when 'tarball' - @run_context.include_recipe 'ark::default' - arkit = ark ls[:name] do - url ls[:source_url] - checksum ls[:checksum] - owner ls[:user] - group ls[:group] - mode 0755 - version ls[:version] - path ls[:basedir] - action :put - end - new_resource.updated_by_last_action(arkit.updated_by_last_action?) - - %w(bin etc lib log tmp etc/conf.d patterns).each do |ldir| - r = directory "#{ls[:instance_dir]}/#{ldir}" do - action :create - mode '0755' - owner ls[:user] - group ls[:group] - end - new_resource.updated_by_last_action(r.updated_by_last_action?) - end - - when 'jar' - bdr = directory @base_directory do - action :create - mode '0755' - owner ls[:user] - group ls[:group] - end - new_resource.updated_by_last_action(bdr.updated_by_last_action?) - - idr = directory ls[:instance_dir] do - action :create - mode '0755' - owner ls[:user] - group ls[:group] - end - new_resource.updated_by_last_action(idr.updated_by_last_action?) - - %w(bin etc lib log tmp etc/conf.d patterns).each do |ldir| - r = directory "#{ls[:instance_dir]}/#{ldir}" do - action :create - mode '0755' - owner ls[:user] - group ls[:group] - end - new_resource.updated_by_last_action(r.updated_by_last_action?) - end - - rfr = remote_file "#{ls[:instance_dir]}/lib/logstash-#{ls[:version]}.jar" do - owner ls[:user] - group ls[:group] - mode '0755' - source ls[:source_url] - checksum ls[:checksum] - end - new_resource.updated_by_last_action(rfr.updated_by_last_action?) 
- - lr = link "#{ls[:instance_dir]}/lib/logstash.jar" do - to "#{ls[:instance_dir]}/lib/logstash-#{ls[:version]}.jar" - only_if { new_resource.auto_symlink } - end - new_resource.updated_by_last_action(lr.updated_by_last_action?) - - when 'source' - bdr = directory @base_directory do - action :create - mode '0755' - owner ls[:user] - group ls[:group] - end - new_resource.updated_by_last_action(bdr.updated_by_last_action?) - - idr = directory ls[:instance_dir] do - action :create - mode '0755' - owner ls[:user] - group ls[:group] - end - new_resource.updated_by_last_action(idr.updated_by_last_action?) - - %w(bin etc lib log tmp etc/conf.d patterns).each do |ldir| - r = directory "#{ls[:instance_dir]}/#{ldir}" do - action :create - mode '0755' - owner ls[:user] - group ls[:group] - end - new_resource.updated_by_last_action(r.updated_by_last_action?) - end - - sd = directory "#{ls[:instance_dir]}/source" do - action :create - owner ls[:user] - group ls[:group] - mode '0755' - end - new_resource.updated_by_last_action(sd.updated_by_last_action?) - - gr = git "#{ls[:instance_dir]}/source" do - repository @repo - reference @sha - action :sync - user ls[:user] - group ls[:group] - end - new_resource.updated_by_last_action(gr.updated_by_last_action?) - - source_version = @sha || "v#{@version}" - er = execute 'build-logstash' do - cwd "#{ls[:instance_dir]}/source" - environment(JAVA_HOME: @java_home) - user ls_user # Changed from root cause building as root...WHA? - command "make clean && make VERSION=#{source_version} jar" - action :run - creates "#{ls[:instance_dir]}/source/build/logstash-#{source_version}--monolithic.jar" - not_if "test -f #{ls[:instance_dir]}/source/build/logstash-#{source_version}--monolithic.jar" - end - new_resource.updated_by_last_action(er.updated_by_last_action?) 
- lr = link "#{ls[:instance_dir]}/lib/logstash.jar" do - to "#{ls[:instance_dir]}/source/build/logstash-#{source_version}--monolithic.jar" - only_if { new_resource.auto_symlink } - end - new_resource.updated_by_last_action(lr.updated_by_last_action?) - else - Chef::Application.fatal!("Unknown install type: #{@install_type}") - end - logrotate(ls) if ls[:logrotate_enable] - -end - -private - -def logrotate(ls) - name = ls[:name] - - @run_context.include_recipe 'logrotate::default' - - logrotate_app "logstash_#{name}" do - path "#{ls[:homedir]}/log/*.log" - size ls[:logrotate_size] if ls[:logrotate_use_filesize] - frequency ls[:logrotate_frequency] - rotate ls[:logrotate_max_backup] - options ls[:logrotate_options] - create "664 #{ls[:user]} #{ls[:group]}" - end -end - -def ls_vars - ls = { - homedir: @useropts[:homedir], - uid: @useropts[:uid], - gid: @useropts[:gid], - source_url: @source_url, - version: @version, - checksum: @checksum, - basedir: @base_directory, - user: @user, - group: @group, - name: @name, - instance_dir: @instance_dir, - enable_logrotate: @enable_logrotate, - logrotate_size: @logrotate_size, - logrotate_use_filesize: @logrotate_use_filesize, - logrotate_frequency: @logrotate_frequency, - logrotate_max_backup: @logrotate_max_backup, - logrotate_options: @logrotate_options, - logrotate_enable: @logrotate_enable - } - ls -end diff --git a/providers/service.rb b/providers/service.rb index 2b1e8a9..97470c7 100644 --- a/providers/service.rb +++ b/providers/service.rb @@ -32,45 +32,30 @@ def load_current_resource @debug = Logstash.get_attribute_or_default(node, @instance, 'debug') @install_type = Logstash.get_attribute_or_default(node, @instance, 'install_type') @supervisor_gid = Logstash.get_attribute_or_default(node, @instance, 'supervisor_gid') + @runit_run_template_name = Logstash.get_attribute_or_default(node, @instance, 'runit_run_template_name') + @runit_log_template_name = Logstash.get_attribute_or_default(node, @instance, 
'runit_log_template_name') end -use_inline_resources - action :restart do - service_action(:restart) + new_resource.updated_by_last_action(service_action(:restart)) end action :start do - service_action(:start) + new_resource.updated_by_last_action(service_action(:start)) end action :stop do - service_action(:stop) + new_resource.updated_by_last_action(service_action(:stop)) end action :reload do - service_action(:reload) + new_resource.updated_by_last_action(service_action(:reload)) end action :enable do svc = svc_vars Chef::Log.info("Using init method #{svc[:method]} for #{svc[:service_name]}") case svc[:method] - when 'pleaserun' - @run_context.include_recipe 'pleaserun::default' - pr = pleaserun svc[:service_name] do - name svc[:service_name] - program svc[:command] - args default_args - description svc[:description] - chdir svc[:chdir] - user svc[:user] - group svc[:group] - action :create - not_if { ::File.exist?("/etc/init.d/#{svc[:service_name]}") } - end - new_resource.updated_by_last_action(pr.updated_by_last_action?) - when 'runit' @run_context.include_recipe 'runit::default' ri = runit_service svc[:service_name] do @@ -92,10 +77,15 @@ def load_current_resource web_port: svc[:web_port] ) cookbook svc[:templates_cookbook] + run_template_name svc[:runit_run_template_name] + log_template_name svc[:runit_log_template_name] end new_resource.updated_by_last_action(ri.updated_by_last_action?) when 'native' + Chef::Log.warn("Using any init method other than runit is depreciated. 
It is + recommended that you write your own service resources in your wrapper cookbook + if the default runit is not suitable.") native_init = ::Logstash.determine_native_init(node) args = default_args @@ -121,7 +111,8 @@ def load_current_resource debug: svc[:debug], log_file: svc[:log_file], workers: svc[:workers], - supervisor_gid: svc[:supervisor_gid] + supervisor_gid: svc[:supervisor_gid], + upstart_with_sudo: svc[:upstart_with_sudo] ) notifies :restart, "service[#{svc[:service_name]}]", :delayed end @@ -226,9 +217,12 @@ def service_action(action) else sv.provider(Chef::Provider::Service::Init) end - sv.run_action(action) - new_resource.updated_by_last_action(sv.updated_by_last_action?) + when 'runit' + @run_context.include_recipe 'runit::default' + sv = runit_service svc[:service_name] end + sv.run_action(action) + sv.updated_by_last_action? end def svc_vars @@ -251,7 +245,9 @@ def svc_vars debug: @debug, install_type: @install_type, supervisor_gid: @supervisor_gid, - templates_cookbook: @templates_cookbook + templates_cookbook: @templates_cookbook, + runit_run_template_name: @runit_run_template_name, + runit_log_template_name: @runit_log_template_name } svc end diff --git a/recipes/agent.rb b/recipes/agent.rb index dfa8e4d..d73aada 100644 --- a/recipes/agent.rb +++ b/recipes/agent.rb @@ -7,8 +7,6 @@ name = 'agent' -Chef::Application.fatal!("attribute hash node['logstash']['instance']['#{name}'] must exist.") if node['logstash']['instance'][name].nil? - # these should all default correctly. listing out for example. 
logstash_instance name do action :create diff --git a/recipes/beaver.rb b/recipes/beaver.rb index 4b1a5bb..0dad9f4 100644 --- a/recipes/beaver.rb +++ b/recipes/beaver.rb @@ -4,205 +4,6 @@ # Recipe:: beaver # # -include_recipe 'logstash::default' -include_recipe 'python::default' -include_recipe 'logrotate' - -if node['logstash']['agent']['install_zeromq'] - include_recipe 'logstash::zero_mq_repo' - node['logstash']['zeromq_packages'].each { |p| package p } - python_pip node['logstash']['beaver']['zmq']['pip_package'] do - action :install - end -end - -package 'git' - -basedir = node['logstash']['basedir'] + '/beaver' - -conf_file = "#{basedir}/etc/beaver.conf" -format = node['logstash']['beaver']['format'] -log_file = node['logstash']['beaver']['log_file'] -pid_file = "#{node['logstash']['pid_dir']}/logstash_beaver.pid" - -logstash_server_ip = nil -if Chef::Config[:solo] - logstash_server_ip = node['logstash']['beaver']['server_ipaddress'] if node['logstash']['beaver']['server_ipaddress'] -elsif node['logstash']['beaver']['server_ipaddress'] - logstash_server_ip = node['logstash']['beaver']['server_ipaddress'] -elsif node['logstash']['beaver']['server_role'] - logstash_server_results = search(:node, "roles:#{node['logstash']['beaver']['server_role']}") - logstash_server_ip = logstash_server_results[0]['ipaddress'] unless logstash_server_results.empty? 
-end - -# create some needed directories and files -directory basedir do - owner node['logstash']['user'] - group node['logstash']['group'] - recursive true -end - -[ - File.dirname(conf_file), - File.dirname(log_file), - File.dirname(pid_file) -].each do |dir| - directory dir do - owner node['logstash']['user'] - group node['logstash']['group'] - recursive true - not_if { ::File.exist?(dir) } - end -end - -[log_file, pid_file].each do |f| - file f do - action :touch - owner node['logstash']['user'] - group node['logstash']['group'] - mode '0640' - end -end - -python_pip node['logstash']['beaver']['pika']['pip_package'] do - action :install -end - -python_pip node['logstash']['beaver']['pip_package'] do - action :install -end -# inputs -files = [] -node['logstash']['beaver']['inputs'].each do |ins| - ins.each do |name, hash| - case name - when 'file' then - if hash.key?('path') - files << hash - else - log('input file has no path.') { level :warn } - end - else - log("input type not supported: #{name}") { level :warn } - end - end -end - -# outputs -outputs = [] -conf = {} -node['logstash']['beaver']['outputs'].each do |outs| - outs.each do |name, hash| - case name - when 'rabbitmq', 'amqp' then - outputs << 'rabbitmq' - conf['rabbitmq_host'] = hash['host'] || logstash_server_ip || 'localhost' - conf['rabbitmq_port'] = hash['port'] if hash.key?('port') - conf['rabbitmq_vhost'] = hash['vhost'] if hash.key?('vhost') - conf['rabbitmq_username'] = hash['user'] if hash.key?('user') - conf['rabbitmq_password'] = hash['password'] if hash.key?('password') - conf['rabbitmq_queue'] = hash['queue'] if hash.key?('queue') - conf['rabbitmq_exchange_type'] = hash['rabbitmq_exchange_type'] if hash.key?('rabbitmq_exchange_type') - conf['rabbitmq_exchange'] = hash['exchange'] if hash.key?('exchange') - conf['rabbitmq_exchange_durable'] = hash['durable'] if hash.key?('durable') - conf['rabbitmq_key'] = hash['key'] if hash.key?('key') - when 'redis' then - outputs << 'redis' - host = 
hash['host'] || logstash_server_ip || 'localhost' - port = hash['port'] || '6379' - db = hash['db'] || '0' - conf['redis_url'] = "redis://#{host}:#{port}/#{db}" - conf['redis_namespace'] = hash['key'] if hash.key?('key') - when 'stdout' then - outputs << 'stdout' - when 'zmq', 'zeromq' then - outputs << 'zmq' - host = hash['host'] || logstash_server_ip || 'localhost' - port = hash['port'] || '2120' - conf['zeromq_address'] = "tcp://#{host}:#{port}" - else - log("output type not supported: #{name}") { level :warn } - end - end -end - -conf['logstash_version'] = node['logstash']['server']['version'] >= '1.2' ? '1' : '0' - -output = outputs[0] -log("multiple outpus detected, will consider only the first: #{output}") { level :warn } if outputs.length > 1 -cmd = "beaver -t #{output} -c #{conf_file} -F #{format}" - -template conf_file do - source 'beaver.conf.erb' - mode 0640 - owner node['logstash']['user'] - group node['logstash']['group'] - variables( - conf: conf, - files: files - ) - notifies :restart, 'service[logstash_beaver]' -end - -# use upstart when supported to get nice things like automatic respawns -use_upstart = false -supports_setuid = false -case node['platform_family'] -when 'rhel' - use_upstart = true if node['platform_version'].to_i >= 6 -when 'fedora' - use_upstart = true if node['platform_version'].to_i >= 9 -when 'debian' - use_upstart = true - supports_setuid = true if node['platform_version'].to_f >= 12.04 -end - -if use_upstart - template '/etc/init/logstash_beaver.conf' do - mode '0644' - source 'logstash_beaver.conf.erb' - variables( - cmd: cmd, - group: node['logstash']['supervisor_gid'], - user: node['logstash']['user'], - log: log_file, - supports_setuid: supports_setuid - ) - notifies :restart, 'service[logstash_beaver]' - end - - service 'logstash_beaver' do - supports restart: true, reload: false - action [:enable, :start] - provider Chef::Provider::Service::Upstart - end -else - template '/etc/init.d/logstash_beaver' do - mode '0755' - 
source 'init-beaver.erb' - variables( - cmd: cmd, - pid_file: pid_file, - user: node['logstash']['user'], - log: log_file, - platform: node['platform'] - ) - notifies :restart, 'service[logstash_beaver]' - end - - service 'logstash_beaver' do - supports restart: true, reload: false, status: true - action [:enable, :start] - end -end - -logrotate_app 'logstash_beaver' do - cookbook 'logrotate' - path log_file - frequency 'daily' - postrotate node['logstash']['beaver']['logrotate']['postrotate'] - options node['logstash']['beaver']['logrotate']['options'] - rotate 30 - create "0640 #{node['logstash']['user']} #{node['logstash']['group']}" -end +include_recipe 'python::default' +include_recipe 'beaver::default' diff --git a/recipes/pyshipper.rb b/recipes/pyshipper.rb deleted file mode 100644 index d5a9fe8..0000000 --- a/recipes/pyshipper.rb +++ /dev/null @@ -1,27 +0,0 @@ -# Encoding: utf-8 -# -# Author:: John E. Vincent -# Copyright 2012, John E. Vincent -# License: Apache 2.0 -# Cookbook Name:: logstash -# Recipe:: pyshipper -# -# -include_recipe 'build-essential' -include_recipe 'logstash::default' -include_recipe 'python::pip' -include_recipe 'git' - -package 'python-dev' - -git "#{node['logstash']['basedir']}/shipper" do - repository 'git://github.com/lusis/logstash-shipper.git' - reference 'master' - action :sync -end - -%w(pyzmq-static simplejson argparse).each do |ppkg| - python_pip ppkg do - action :install - end -end diff --git a/recipes/server.rb b/recipes/server.rb index 53d29ec..8e26810 100644 --- a/recipes/server.rb +++ b/recipes/server.rb @@ -14,34 +14,16 @@ name = 'server' -Chef::Application.fatal!("attribute hash node['logstash']['instance']['#{name}'] must exist.") if node['logstash']['instance'][name].nil? - -# these should all default correctly. listing out for example. logstash_instance name do action :create end -# services are hard! Let's go LWRP'ing. FIREBALL! FIREBALL! FIREBALL! 
logstash_service name do - action [:enable] -end - -my_templates = node['logstash']['instance'][name]['config_templates'] - -if my_templates.nil? - my_templates = { - 'input_syslog' => 'config/input_syslog.conf.erb', - 'output_stdout' => 'config/output_stdout.conf.erb', - 'output_elasticsearch' => 'config/output_elasticsearch.conf.erb' - } + action [:enable, :start] end logstash_config name do - templates my_templates action [:create] - variables( - elasticsearch_embedded: true - ) notifies :restart, "logstash_service[#{name}]" end # ^ see `.kitchen.yml` for example attributes to configure templates. diff --git a/resources/curator.rb b/resources/curator.rb index 9d3bf70..2048746 100644 --- a/resources/curator.rb +++ b/resources/curator.rb @@ -15,3 +15,4 @@ attribute :hour, kind_of: String attribute :log_file, kind_of: String attribute :user, kind_of: String +attribute :bin_dir, kind_of: String diff --git a/resources/service.rb b/resources/service.rb index b335a32..1529976 100644 --- a/resources/service.rb +++ b/resources/service.rb @@ -17,4 +17,6 @@ attribute :description, kind_of: String attribute :user, kind_of: String attribute :group, kind_of: String -attribute :templates_cookbook, kind_of: String +attribute :templates_cookbook, kind_of: String +attribute :runit_run_template_name, kind_of: String +attribute :runit_log_template_name, kind_of: String diff --git a/templates/default/agent.conf.erb b/templates/default/agent.conf.erb deleted file mode 100644 index dd52453..0000000 --- a/templates/default/agent.conf.erb +++ /dev/null @@ -1,37 +0,0 @@ -# This file was created for <%= node.name %> -# by Chef -# Manual changes will be lost -input { - <% if node['logstash']['agent']['inputs'].empty? -%> - file { - type => "sample-logs" - path => ["/var/log/*.log"] - exclude => ["*.gz"] - debug => true - } - <% else %> - <%= LogstashConf.section_to_str(node['logstash']['agent']['inputs']) %> - <% end -%> -} - -<% unless node['logstash']['agent']['filters'].empty? 
-%> -filter { - <%= LogstashConf.section_to_str(node['logstash']['agent']['filters'], node['logstash']['agent']['version'], @patterns_dir) %> -} -<% end -%> - -output { - <% if node['logstash']['agent']['debug'] -%> - stdout { } - <% end -%> - <% if node['logstash']['agent']['outputs'].empty? -%> - <% if @logstash_server_ip.empty? -%> - # Provide a sane default - null { } - <% else -%> - tcp { host => "<%= @logstash_server_ip %>" port => "5959" } - <% end -%> - <% else -%> - <%= LogstashConf.section_to_str(node['logstash']['agent']['outputs']) %> - <% end -%> -} diff --git a/templates/default/beaver.conf.erb b/templates/default/beaver.conf.erb deleted file mode 100644 index 2967633..0000000 --- a/templates/default/beaver.conf.erb +++ /dev/null @@ -1,24 +0,0 @@ -[beaver] -<% @conf.each do |key, value| -%> -<%= key %>: <%= value %> -<% end -%> - -<% @files.each do |file| -%> -<% file['path'].each do |path| -%> -[<%= path %>] -type: <%= file['type'] || 'file' %> -<% if file.has_key?('tags') -%> -tags: <%= file['tags'].join(',') %> -<% end -%> -<% if file.has_key?('add_field') -%> -add_field: <%= file['add_field'].join(',') %> -<% end -%> -<% if file.has_key?('regex_after') %> -multiline_regex_after: <%= file['regex_after'] %> -<% end %> -<% if file.has_key?('regex_before') %> -multiline_regex_before: <%= file['regex_before'] %> -<% end %> - -<% end -%> -<% end -%> diff --git a/templates/default/config/output_elasticsearch_http.conf.erb b/templates/default/config/output_elasticsearch_http.conf.erb new file mode 100644 index 0000000..f126ddb --- /dev/null +++ b/templates/default/config/output_elasticsearch_http.conf.erb @@ -0,0 +1,12 @@ +output { + elasticsearch_http { +<% if @elasticsearch_ip -%> + host => "<%= @elasticsearch_ip %>" +<% else -%> + host => "127.0.0.1" +<% end -%> +<% if @es_index -%> + index => "<%= @es_index %>" +<% end -%> + } +} diff --git a/templates/default/init-beaver.erb b/templates/default/init-beaver.erb deleted file mode 100644 index 
84de0ad..0000000 --- a/templates/default/init-beaver.erb +++ /dev/null @@ -1,142 +0,0 @@ -#!/bin/bash - -<% if [ "redhat", "centos","amazon", "fedora" ].include?(@platform) -%> -# -# beaver -# -# chkconfig: - 57 47 -# description: Log Sender provided by beaver -# processname: beaver -<% else -%> -### BEGIN INIT INFO -# Provides: beaver -# Required-Start: $local_fs $remote_fs $network -# Required-Stop: $local_fs $remote_fs $network -# Default-Start: 2 3 4 5 -# Default-Stop: 0 1 6 -# Short-Description: Start up the Beaver at boot time -# Description: Enable Log Sender provided by beaver. -### END INIT INFO -<% end -%> - - -BEAVER_NAME='beaver' -BEAVER_CMD='<%= @cmd %>' -BEAVER_PID='<%= @pid_file %>' -BEAVER_USER='<%= @user %>' -BEAVER_LOG='<%= @log %>' - - -PATH='/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin' -export PATH -IFS=$' \t\n' -export IFS - -BEAVER_BIN="$(which "${BEAVER_NAME}")" - -[ -r /etc/init.d/functions ] && . /etc/init.d/functions -[ -r /lib/lsb/init-functions ] && . /lib/lsb/init-functions -[ -r "/etc/default/${BEAVER_NAME}" ] && . "/etc/default/${BEAVER_NAME}" - -do_start() { - test -f "${BEAVER_BIN}" || exit 0 - if is_up - then - echo $'Log Sender server daemon already started.' - return 0 - fi - echo -n $"Log Sender server daemon: ${BEAVER_NAME}" - su - "${BEAVER_USER}" -s '/bin/bash' -c "${BEAVER_CMD} >> ${BEAVER_LOG} 2>&1 & echo \$! > ${BEAVER_PID}" - echo '.' -} - -do_stop() { - test -f "${BEAVER_BIN}" || exit 0 - if ! is_up - then - echo $'Log Sender server daemon already stopped.' - return 0 - fi - echo -n $"Stopping Log Sender server daemon: ${BEAVER_NAME}" - do_kill - local I='0' - while is_up - do - echo -n '.' - if [ "${I}" -gt 10 ] - then - do_kill_force - I='0' - else - do_kill - fi - sleep 1 - I="$((I+1))" - done - echo '.' 
-} - -beaver_pid() { - tail -1 "${BEAVER_PID}" 2> /dev/null -} - -is_up() { - PID="$(beaver_pid)" - [ x"${PID}" != x ] && ps -p "${PID}" -o comm,args h 2> /dev/null | grep -qFw "${BEAVER_NAME}" -} - -do_kill() { - PID="$(beaver_pid)" - [ x"${PID}" != x ] && su - "${BEAVER_USER}" -c "kill -TERM ${PID}" -} - -do_kill_force() { - PID="$(beaver_pid)" - echo -n 'force' - [ x"${PID}" != x ] && su - "${BEAVER_USER}" -c "kill -KILL ${PID}" -} - -do_restart() { - test -f "${BEAVER_BIN}" || exit 0 - do_stop - sleep 1 - do_start -} - -do_status() { - test -f "${BEAVER_BIN}" || exit 0 - if is_up - then - echo "${BEAVER_NAME} is running." - exit 0 - else - echo "${BEAVER_NAME} is not running." - exit 1 - fi -} - -do_usage() { - echo $"Usage: $0 {start | stop | restart | force-reload | status}" - exit 1 -} - -case "$1" in -start) - do_start - exit "$?" - ;; -stop) - do_stop - exit "$?" - ;; -restart|force-reload) - do_restart - exit "$?" - ;; -status) - do_status - ;; -*) - do_usage - ;; -esac - diff --git a/templates/default/init/sysvinit/java.erb b/templates/default/init/sysvinit/java.erb deleted file mode 100755 index 6144580..0000000 --- a/templates/default/init/sysvinit/java.erb +++ /dev/null @@ -1,173 +0,0 @@ -#!/usr/bin/env bash -# -# logstash -# -# chkconfig: - 57 47 -# description: logstash -# processname: logstash - - -PIDDIR="/var/run" -export PIDFILE="/var/run/logstash-<%= @name %>.pid" -export LS_HOME="<%= @home %>" -export LS_CONFIG="<%= @config_file %>" -LS_USER="<%= @user %>" -LS_GROUP="<%= @group %>" -LS_LOG="<%= @log_file %>" -LOGDIR="<%= ::File.dirname @log_file %>" -export JAVA_OPTS="-server -Xms<%= @min_heap %> -Xmx<%= @max_heap %> -Djava.io.tmpdir=$LS_HOME/tmp/ <%= @java_opts %> <%= '-Djava.net.preferIPv4Stack=true' if @ipv4_only %>" -BIN_SCRIPT="/usr/bin/env java $JAVA_OPTS -jar $LS_HOME/lib/logstash.jar agent -f $LS_CONFIG > $LS_LOG 2>&1 & echo \$! > $PIDFILE" - -if [ -f /etc/init.d/functions ] ; then - . /etc/init.d/functions -fi - -start() { - - if [ ! 
-d "$PIDDIR" ] ; then - mkdir "$PIDDIR" - chown -R $LS_USER:$LS_GROUP $PIDDIR - fi - - if [ ! -d "$LOGDIR" ] ; then - mkdir "$LOGDIR" - fi - - chown -R $LS_USER:$LS_GROUP $LOGDIR $PIDDIR - - - if [ -f $PIDFILE ]; then - echo -e "\033[31;1mPID file found in $PIDFILE, already running?\033[0m" - ls_pid="$(cat $PIDFILE)" - pid_running="$( ps ax | grep 'java' | grep $ls_pid )" - - if [ ! -z "$pid_running" ] ; then - echo -e "\033[31;1mPID $ls_pid still alive, logstash is already running. Doing nothing\033[0m" - return 1 - fi - fi - - echo -e "\033[1mStarting logstash...\033[0m" - pushd $LS_HOME > /dev/null 2>&1 - su $LS_USER -c "$BIN_SCRIPT" > /dev/null 2>&1 - ls_pid=$! - result=$? - popd > /dev/null 2>&1 - - if [ $result -ne 0 ] ; then - failure - echo -e "Logstash did not start successfully" - exit 1 - else - success - echo -e "Logstash started successfully" - fi - -} - - - -function stop() { - echo -n -e "\033[1mStopping logstash...\033[0m" - - if [ -z "$SHUTDOWN_WAIT" ]; then - SHUTDOWN_WAIT=5 - fi - - if [ ! -z "$PIDFILE" ]; then - if [ -f "$PIDFILE" ]; then - kill -0 `cat $PIDFILE` >/dev/null 2>&1 - if [ $? -gt 0 ]; then - echo "PID file ($PIDFILE) found but no matching process was found. Nothing to do." - return 0 - fi - else - echo "\$PIDFILE was set ($PIDFILE) but the specified file does not exist. Is Logstash running? Assuming it has stopped and pro\ - ceeding." - return 0 - fi - fi - - kill `cat $PIDFILE` >/dev/null 2>&1 - - if [ ! -z "$PIDFILE" ]; then - if [ -f "$PIDFILE" ]; then - while [ $SHUTDOWN_WAIT -ge 0 ]; do - kill -0 `cat $PIDFILE` >/dev/null 2>&1 - if [ $? -gt 0 ]; then - rm $PIDFILE - break - fi - if [ $SHUTDOWN_WAIT -gt 0 ]; then - sleep 1 - fi - SHUTDOWN_WAIT=`expr $SHUTDOWN_WAIT - 1 ` - done - # still not dead, we may need to resort to drastic measures - if [ -f "$PIDFILE" ]; then - kill -0 `cat $PIDFILE` >/dev/null 2>&1 - if [ $? 
-eq 0 ]; then - echo "Application still alive, sleeping for 20 seconds before sending SIGKILL" - sleep 20 - kill -0 `cat $PIDFILE` >/dev/null 2>&1 - if [ $? -eq 0 ]; then - kill -9 `cat $PIDFILE` >/dev/null 2>&1 - echo "Killed with extreme prejudice" - else - echo "Application stopped, no need to use SIGKILL" - fi - rm $PIDFILE - fi - fi - fi - fi -} - -restart() { - stop - start -} - -status() { - # GOT PIDFILE? - [ -f $PIDFILE ] && pid=$(cat $PIDFILE) - - # RUNNING - if [[ $pid && -d "/proc/$pid" ]]; then - success - echo -e "Logstash is running with pid $pid" - fi - - # NOT RUNNING - if [[ ! $pid || ! -d "/proc/$pid" ]]; then - echo "Logstash not running" - exit 3 - fi - - # STALE PID FOUND - if [[ ! -d "/proc/$pid" && -f $PIDFILE ]]; then - echo -e "\033[1;31;40m[!] Stale PID found in $PIDFILE\033[0m" - exit 1 - fi -} - - -case "$1" in - start) - start - ;; - stop) - stop - ;; - restart) - restart - ;; - status) - status $2 - ;; - *) - echo $"Usage: $0 {start|stop|restart|status [-v]|}" - exit 1 -esac - -exit $? diff --git a/templates/default/init/upstart/java.erb b/templates/default/init/upstart/java.erb deleted file mode 100644 index 0e577e7..0000000 --- a/templates/default/init/upstart/java.erb +++ /dev/null @@ -1,42 +0,0 @@ -description "Logstash" -author "Chef" - -start on (filesystem and net-device-up) -stop on runlevel [!2345] - -respawn -respawn limit 5 30 -limit nofile 65550 65550 - -chdir <%= @home %> - -<% if @user_supported -%> -setuid <%= @user %> - <% unless @supervisor_gid.to_s.empty? 
-%> -setgid <%= @supervisor_gid %> - <% end -%> -<% end -%> - -script - export LOGSTASH_HOME="<%= @home %>" - export HOME=$LOGSTASH_HOME - - export GC_OPTS="<%= @gc_opts %>" - export JAVA_OPTS="-server -Xms<%= @min_heap %> -Xmx<%= @max_heap %> -Djava.io.tmpdir=$LOGSTASH_HOME/tmp/ <%= @java_opts %> <%= '-Djava.net.preferIPv4Stack=true' if @ipv4_only %>" - export LOGSTASH_OPTS="agent -f $LOGSTASH_HOME/etc/conf.d" - export LOGSTASH_OPTS="$LOGSTASH_OPTS --pluginpath $LOGSTASH_HOME/lib" - export LOGSTASH_OPTS="$LOGSTASH_OPTS -w <%= @workers %>" - export LOGSTASH_OPTS="$LOGSTASH_OPTS -l $LOGSTASH_HOME/log/<%= @log_file %>" - <% if @debug -%> - export LOGSTASH_OPTS="$LOGSTASH_OPTS -vv" - <% end -%> - export OPTS="$JAVA_OPTS $GC_OPTS -jar $LOGSTASH_HOME/lib/logstash.jar $LOGSTASH_OPTS" - - <% if @user_supported -%> - exec /usr/bin/java $OPTS - <% else -%> - exec sudo -u <%= @user %> /usr/bin/java $OPTS - <% end -%> -end script - -emits logstash-server-running diff --git a/templates/default/init/upstart/tarball.erb b/templates/default/init/upstart/tarball.erb index 5189a4e..895149b 100644 --- a/templates/default/init/upstart/tarball.erb +++ b/templates/default/init/upstart/tarball.erb @@ -22,7 +22,8 @@ script export LOGSTASH_HOME="<%= @home %>" export HOME=$LOGSTASH_HOME export LOGSTASH_OPTS="<%= @args.join(' ') %>" - + export GC_OPTS="<%= @gc_opts %>" + export JAVA_OPTS="-server -Xms<%= @min_heap %> -Xmx<%= @max_heap %> -Djava.io.tmpdir=$LOGSTASH_HOME/tmp/ <%= @java_opts %> <%= '-Djava.net.preferIPv4Stack=true' if @ipv4_only %>" <% if @user_supported -%> exec $LOGSTASH_HOME/bin/logstash $LOGSTASH_OPTS <% else -%> diff --git a/templates/default/logstash_beaver.conf.erb b/templates/default/logstash_beaver.conf.erb deleted file mode 100644 index bf9a56f..0000000 --- a/templates/default/logstash_beaver.conf.erb +++ /dev/null @@ -1,22 +0,0 @@ -description "Logstash beaver" -author "Chef" - -start on (filesystem and net-device-up) -stop on runlevel [!2345] - -respawn -respawn 
limit 5 30 - -<% if @supports_setuid %> -setuid <%= @user %> -<% unless @group.to_s.empty? -%> -setgid <%= @group %> -<% end -%> -<% end %> -chdir <%= node['logstash']['basedir'] %>/beaver - -console output - -exec <%= @cmd %> >> <%= @log %> 2>&1 - -emits logstash-beaver-running diff --git a/templates/default/logstash_web.conf.erb b/templates/default/logstash_web.conf.erb deleted file mode 100644 index c60d7b9..0000000 --- a/templates/default/logstash_web.conf.erb +++ /dev/null @@ -1,25 +0,0 @@ -description "Logstash" -author "Chef" - -start on (filesystem and net-device-up) -stop on runlevel [!2345] - -respawn -respawn limit 5 30 -limit nofile 65550 65550 - -chdir <%= node['logstash']['server']['home'] %> -setuid <%= node['logstash']['user'] %> - -script - export LOGSTASH_HOME="<%= node['logstash']['server']['home'] %>" - export HOME=$LOGSTASH_HOME - export GC_OPTS="<%= node['logstash']['server']['gc_opts'] %>" - export JAVA_OPTS="-server -Xms<%= node['logstash']['server']['xms'] %> -Xmx<%= node['logstash']['server']['xmx'] %> -Djava.io.tmpdir=$LOGSTASH_HOME/tmp/ <%= node['logstash']['server']['java_opts'] %> <%= '-Djava.net.preferIPv4Stack=true' if node['logstash']['agent']['ipv4_only'] %>" - export LOGSTASH_OPTS="web -a <%= node['logstash']['server']['web']['address'] %> -p <%= node['logstash']['server']['web']['port'] %>" - export OPTS="$JAVA_OPTS $GC_OPTS -jar $LOGSTASH_HOME/lib/logstash.jar $LOGSTASH_OPTS" - - exec /usr/bin/java $OPTS -end script - -emits logstash-web-running diff --git a/templates/default/sv-logstash_agent-log-run.erb b/templates/default/sv-logstash-log-run.erb similarity index 100% rename from templates/default/sv-logstash_agent-log-run.erb rename to templates/default/sv-logstash-log-run.erb diff --git a/templates/default/sv-logstash-run.erb b/templates/default/sv-logstash-run.erb new file mode 100644 index 0000000..5de12e7 --- /dev/null +++ b/templates/default/sv-logstash-run.erb @@ -0,0 +1,23 @@ +#!/bin/sh + +ulimit -Hn 65550 +ulimit -Sn 
65550 + +cd /<%= @options[:home] %> +exec 2>&1 +# Need to set LOGSTASH_HOME and HOME so sincedb will work +export LOGSTASH_HOME="<%= @options[:home] %>" +export GC_OPTS="<%= @options[:gc_opts] %>" +export JAVA_OPTS="-server -Xms<%= @options[:min_heap] %> -Xmx<%= @options[:max_heap] %> -Djava.io.tmpdir=$LOGSTASH_HOME/tmp/ <%= @options[:java_opts] %> <%= '-Djava.net.preferIPv4Stack=true' if @options[:ipv4_only] %>" +LOGSTASH_OPTS="agent -f $LOGSTASH_HOME/etc/conf.d" +LOGSTASH_OPTS="$LOGSTASH_OPTS --pluginpath $LOGSTASH_HOME/lib" +<% if @options[:debug] -%> +LOGSTASH_OPTS="$LOGSTASH_OPTS -vv" +<% end -%> +LOGSTASH_OPTS="$LOGSTASH_OPTS -l $LOGSTASH_HOME/log/<%= @options[:log_file] %>" +export LOGSTASH_OPTS="$LOGSTASH_OPTS -w <%= @options[:workers] %>" +<% if @options[:supervisor_gid] -%> +HOME=$LOGSTASH_HOME exec chpst -u <%= @options[:user] %>:<%= @options[:supervisor_gid] %> $LOGSTASH_HOME/bin/logstash $LOGSTASH_OPTS +<% else -%> +HOME=$LOGSTASH_HOME exec chpst -u <%= @options[:user] %> $LOGSTASH_HOME/bin/logstash $LOGSTASH_OPTS +<% end -%> diff --git a/templates/default/sv-logstash_agent-run.erb b/templates/default/sv-logstash_agent-run.erb deleted file mode 100644 index 5225bbe..0000000 --- a/templates/default/sv-logstash_agent-run.erb +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/sh - -cd /<%= @options[:home] %> -exec 2>&1 -# Need to set LOGSTASH_HOME and HOME so sincedb will work -LOGSTASH_HOME="<%= @options[:home] %>" -GC_OPTS="-XX:+UseParallelOldGC" -JAVA_OPTS="-server -Xms<%= @options[:min_heap] %> -Xmx<%= @options[:max_heap] %> -Djava.io.tmpdir=$LOGSTASH_HOME/tmp/" -LOGSTASH_OPTS="agent -f $LOGSTASH_HOME/etc/conf.d" -<% if @options[:debug] -%> -LOGSTASH_OPTS="$LOGSTASH_OPTS -vv" -<% end -%> -LOGSTASH_OPTS="$LOGSTASH_OPTS -l $LOGSTASH_HOME/log/<%= @options[:log_file] %>" -export LOGSTASH_OPTS="$LOGSTASH_OPTS -w <%= @options[:workers] %>" -<% if @options[:install_type] == 'tarball' -%> -<% if @options[:supervisor_gid] -%> -HOME=$LOGSTASH_HOME exec chpst -u <%= 
@options[:user] %>:<%= @options[:supervisor_gid] %> $LOGSTASH_HOME/bin/logstash $LOGSTASH_OPTS -<% else -%> -HOME=$LOGSTASH_HOME exec chpst -u <%= @options[:user] %> $LOGSTASH_HOME/bin/logstash $LOGSTASH_OPTS -<% end -%> -<% else -%> -<% if @options[:supervisor_gid] -%> -HOME=$LOGSTASH_HOME exec chpst -u <%= @options[:user] %>:<%= @options[:supervisor_gid] %> java $JAVA_OPTS $GC_OPTS -jar $LOGSTASH_HOME/lib/logstash.jar $LOGSTASH_OPTS -<% else -%> -HOME=$LOGSTASH_HOME exec chpst -u <%= @options[:user] %> java $JAVA_OPTS $GC_OPTS -jar $LOGSTASH_HOME/lib/logstash.jar $LOGSTASH_OPTS -<% end -%> -<% end -%> diff --git a/templates/default/sv-logstash_server-log-run.erb b/templates/default/sv-logstash_server-log-run.erb deleted file mode 100644 index a79a518..0000000 --- a/templates/default/sv-logstash_server-log-run.erb +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -exec svlogd -tt ./main diff --git a/templates/default/sv-logstash_server-run.erb b/templates/default/sv-logstash_server-run.erb deleted file mode 100644 index 0fc077a..0000000 --- a/templates/default/sv-logstash_server-run.erb +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/sh - -ulimit -Hn 65550 -ulimit -Sn 65550 - -cd /<%= @options[:home] %> -exec 2>&1 -# Need to set LOGSTASH_HOME and HOME so sincedb will work -LOGSTASH_HOME="<%= @options[:home] %>" -GC_OPTS="<%= @options[:gc_opts] %>" -JAVA_OPTS="-server -Xms<%= @options[:min_heap] %> -Xmx<%= @options[:max_heap] %> -Djava.io.tmpdir=$LOGSTASH_HOME/tmp/ <%= @options[:java_opts] %> <%= '-Djava.net.preferIPv4Stack=true' if @options[:ipv4_only] %>" -LOGSTASH_OPTS="agent -f $LOGSTASH_HOME/etc/conf.d" -LOGSTASH_OPTS="$LOGSTASH_OPTS --pluginpath $LOGSTASH_HOME/lib" -<% if @options[:debug] -%> -LOGSTASH_OPTS="$LOGSTASH_OPTS -vv" -<% end -%> -LOGSTASH_OPTS="$LOGSTASH_OPTS -l $LOGSTASH_HOME/log/<%= @options[:log_file] %>" -export LOGSTASH_OPTS="$LOGSTASH_OPTS -w <%= @options[:workers] %>" -<% if @options[:install_type] == 'tarball' -%> -<% if @options[:supervisor_gid] -%> 
-HOME=$LOGSTASH_HOME exec chpst -u <%= @options[:user] %>:<%= @options[:supervisor_gid] %> $LOGSTASH_HOME/bin/logstash $LOGSTASH_OPTS -<% else -%> -HOME=$LOGSTASH_HOME exec chpst -u <%= @options[:user] %> $LOGSTASH_HOME/bin/logstash $LOGSTASH_OPTS -<% end -%> -<% else -%> -<% if @options[:supervisor_gid] -%> -HOME=$LOGSTASH_HOME exec chpst -u <%= @options[:user] %>:<%= @options[:supervisor_gid] %> java $JAVA_OPTS $GC_OPTS -jar $LOGSTASH_HOME/lib/logstash.jar $LOGSTASH_OPTS -<% else -%> -HOME=$LOGSTASH_HOME exec chpst -u <%= @options[:user] %> java $JAVA_OPTS $GC_OPTS -jar $LOGSTASH_HOME/lib/logstash.jar $LOGSTASH_OPTS -<% end -%> -<% end -%> diff --git a/test/integration/server/serverspec/spec_helper.rb b/test/integration/server/serverspec/spec_helper.rb index 219f365..a2776ae 100644 --- a/test/integration/server/serverspec/spec_helper.rb +++ b/test/integration/server/serverspec/spec_helper.rb @@ -1,8 +1,8 @@ # Encoding: utf-8 require 'serverspec' -include Serverspec::Helper::Exec -include Serverspec::Helper::DetectOS +# Required by serverspec +set :backend, :exec RSpec.configure do |c| c.before :all do diff --git a/test/unit/spec/lwrp_config_spec.rb b/test/unit/spec/lwrp_config_spec.rb index 87cc99c..b6a4c56 100644 --- a/test/unit/spec/lwrp_config_spec.rb +++ b/test/unit/spec/lwrp_config_spec.rb @@ -18,12 +18,15 @@ runner.node.set['logstash']['instance']['server']['config_templates_cookbook'] = 'logstash' runner.node.set['logstash']['instance']['server']['elasticsearch_ip'] = '127.0.0.1' runner.node.set['logstash']['instance']['server']['enable_embedded_es'] = true + runner.node.set['logstash']['instance']['server']['config_templates'] = { + output_stdout: 'config/output_stdout.conf.erb' + } runner.converge(described_recipe) end include_context 'stubs-common' it 'installs the output_stdout template' do - expect(chef_run).to create_template('/opt/logstash/server/etc/conf.d/output_stdout.conf').with( + expect(chef_run).to 
create_template('/opt/logstash/server/etc/conf.d/output_stdout').with( source: 'config/output_stdout.conf.erb', cookbook: 'logstash', owner: 'logstash', @@ -33,30 +36,5 @@ ) end - it 'installs the input_syslog template' do - expect(chef_run).to create_template('/opt/logstash/server/etc/conf.d/input_syslog.conf').with( - source: 'config/input_syslog.conf.erb', - cookbook: 'logstash', - owner: 'logstash', - group: 'logstash', - mode: '0644', - action: [:create] - ) - end - - it 'installs the output_elasticsearch template' do - expect(chef_run).to create_template('/opt/logstash/server/etc/conf.d/output_elasticsearch.conf').with( - source: 'config/output_elasticsearch.conf.erb', - cookbook: 'logstash', - owner: 'logstash', - group: 'logstash', - mode: '0644', - variables: { - elasticsearch_embedded: true - }, - action: [:create] - ) - end - end end diff --git a/test/unit/spec/spec_helper.rb b/test/unit/spec/spec_helper.rb index 9b95abc..7af77f0 100644 --- a/test/unit/spec/spec_helper.rb +++ b/test/unit/spec/spec_helper.rb @@ -5,6 +5,8 @@ require 'chefspec/server' require 'chef/application' +require_relative 'support/matchers' + ::LOG_LEVEL = :fatal ::REDHAT_OPTS = { diff --git a/test/unit/spec/support/matchers.rb b/test/unit/spec/support/matchers.rb new file mode 100644 index 0000000..38aacbc --- /dev/null +++ b/test/unit/spec/support/matchers.rb @@ -0,0 +1,9 @@ +# Encoding: utf-8 + +def enable_runit_service(resource_name) + ChefSpec::Matchers::ResourceMatcher.new(:runit_service, :enable, resource_name) +end + +def start_runit_service(resource_name) + ChefSpec::Matchers::ResourceMatcher.new(:runit_service, :start, resource_name) +end