diff --git a/.circleci/config.yml b/.circleci/config.yml index 4e7d3053..61293a96 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -9,11 +9,11 @@ jobs: build-jruby-1.7.26: <<: *defaults docker: - - image: killbill/kbbuild:0.2.0 + - image: killbill/kbbuild:0.7.0 steps: - checkout - restore_cache: - key: v4-dependencies-jruby-1.7.26-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} + key: v5-dependencies-jruby-1.7.26-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} - run: name: Install gem dependencies command: | @@ -24,20 +24,20 @@ jobs: - kpm/Gemfile.lock - kpm/.bundle - kpm/vendor/bundle - key: v4-dependencies-jruby-1.7.26-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} + key: v5-dependencies-jruby-1.7.26-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} test-mysql-jruby-1.7.26: <<: *defaults docker: - - image: killbill/kbbuild:0.2.0 - - image: killbill/killbill:0.18.16 - - image: killbill/mariadb:0.18 + - image: killbill/kbbuild:0.7.0 + - image: killbill/killbill:0.22.0 + - image: killbill/mariadb:0.22 environment: - MYSQL_ROOT_PASSWORD=root steps: - checkout - restore_cache: - key: v4-dependencies-jruby-1.7.26-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} + key: v5-dependencies-jruby-1.7.26-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} - run: name: Setup DDL command: | @@ -92,11 +92,11 @@ jobs: build-ruby-2.2.2: <<: *defaults docker: - - image: killbill/kbbuild:0.2.0 + - image: killbill/kbbuild:0.7.0 steps: - checkout - restore_cache: - key: v4-dependencies-ruby-2.2.2-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} + key: v5-dependencies-ruby-2.2.2-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} - run: name: Install gem dependencies command: | @@ -109,20 +109,20 @@ jobs: - kpm/Gemfile.lock - kpm/.bundle - kpm/vendor/bundle - key: v4-dependencies-ruby-2.2.2-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} + key: v5-dependencies-ruby-2.2.2-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} test-mysql-ruby-2.2.2: <<: *defaults docker: - - image: 
killbill/kbbuild:0.2.0 - - image: killbill/killbill:0.18.16 - - image: killbill/mariadb:0.18 + - image: killbill/kbbuild:0.7.0 + - image: killbill/killbill:0.22.0 + - image: killbill/mariadb:0.22 environment: - MYSQL_ROOT_PASSWORD=root steps: - checkout - restore_cache: - key: v4-dependencies-ruby-2.2.2-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} + key: v5-dependencies-ruby-2.2.2-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} - run: name: Setup DDL command: | @@ -179,11 +179,11 @@ jobs: build-ruby-2.4.2: <<: *defaults docker: - - image: killbill/kbbuild:0.2.0 + - image: killbill/kbbuild:0.7.0 steps: - checkout - restore_cache: - key: v4-dependencies-ruby-2.4.2-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} + key: v5-dependencies-ruby-2.4.2-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} - run: name: Install gem dependencies command: | @@ -196,20 +196,20 @@ jobs: - kpm/Gemfile.lock - kpm/.bundle - kpm/vendor/bundle - key: v4-dependencies-ruby-2.4.2-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} + key: v5-dependencies-ruby-2.4.2-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} test-mysql-ruby-2.4.2: <<: *defaults docker: - - image: killbill/kbbuild:0.2.0 - - image: killbill/killbill:0.18.16 - - image: killbill/mariadb:0.18 + - image: killbill/kbbuild:0.7.0 + - image: killbill/killbill:0.22.0 + - image: killbill/mariadb:0.22 environment: - MYSQL_ROOT_PASSWORD=root steps: - checkout - restore_cache: - key: v4-dependencies-ruby-2.4.2-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} + key: v5-dependencies-ruby-2.4.2-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} - run: name: Setup DDL command: | @@ -263,14 +263,36 @@ jobs: path: /tmp/test-results destination: test-results + test-rubocop: + <<: *defaults + docker: + - image: killbill/kbbuild:0.7.0 + steps: + - checkout + - restore_cache: + key: v5-dependencies-ruby-2.4.2-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} + - run: + name: Run RuboCop + command: | + mkdir /tmp/test-results + cd kpm + source 
/usr/share/rvm/scripts/rvm + rvm use ruby-2.4.2 + bundle exec rubocop --format tap | tee /tmp/test-results/rubocop.txt 2>&1 + - store_test_results: + path: /tmp/test-results + - store_artifacts: + path: /tmp/test-results + destination: test-results + build-jruby-9.1.14.0: <<: *defaults docker: - - image: killbill/kbbuild:0.2.0 + - image: killbill/kbbuild:0.7.0 steps: - checkout - restore_cache: - key: v4-dependencies-jruby-9.1.14.0-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} + key: v5-dependencies-jruby-9.1.14.0-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} - run: name: Install gem dependencies command: | @@ -283,20 +305,20 @@ jobs: - kpm/Gemfile.lock - kpm/.bundle - kpm/vendor/bundle - key: v4-dependencies-jruby-9.1.14.0-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} + key: v5-dependencies-jruby-9.1.14.0-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} test-mysql-jruby-9.1.14.0: <<: *defaults docker: - - image: killbill/kbbuild:0.2.0 - - image: killbill/killbill:0.18.16 - - image: killbill/mariadb:0.18 + - image: killbill/kbbuild:0.7.0 + - image: killbill/killbill:0.22.0 + - image: killbill/mariadb:0.22 environment: - MYSQL_ROOT_PASSWORD=root steps: - checkout - restore_cache: - key: v4-dependencies-jruby-9.1.14.0-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} + key: v5-dependencies-jruby-9.1.14.0-{{ .Branch }}-{{ checksum "kpm/kpm.gemspec" }} - run: name: Setup DDL command: | @@ -378,6 +400,9 @@ workflows: requires: - build-ruby-2.4.2 - test-mysql-jruby-1.7.26 + - test-rubocop: + requires: + - build-ruby-2.4.2 - test-mysql-jruby-9.1.14.0: requires: - build-jruby-9.1.14.0 diff --git a/.gitignore b/.gitignore index 4a42e43f..a58081e6 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,5 @@ logs/ ansible/*.retry ansible/hosts ansible-container/ansible-deployment +docker/templates/killbill/tagged/Dockerfile +docker/templates/killbill/tagged/kpm.yml diff --git a/ansible/README.md b/ansible/README.md index e1dc4d3d..8a89bf95 100644 --- a/ansible/README.md +++ 
b/ansible/README.md @@ -20,6 +20,7 @@ The roles can now be referenced in your playbooks via `killbill-cloud/ansible/ro See below for example playbooks. + # Usage ## killbill.yml playbook @@ -58,6 +59,18 @@ Example playbook on how to install Tomcat (Java is a pre-requisite): ansible-playbook -i tomcat.yml ``` +## migrations.yml playbook + +Playbook to manage Kill Bill (and its plugins) database migrations. + +Assuming Kill Bill is installed locally (`/var/lib/tomcat/webapps/ROOT.war` by default) and your `kpm.yml` (`/var/lib/killbill/kpm.yml` by default) points to the **new** version of Kill Bill: + +``` +ansible-playbook -i localhost, -e ansible_connection=local -e gh_token=XXX migrations.yml +``` + +This will install Flyway, fetch all migrations and prompt the user whether they should be applied. + ## plugin.yml playbook Allow to restart a specific plugin @@ -107,3 +120,36 @@ ansible -i -m killbill_facts -a 'killbill_web_path=/pat ``` Ansible requires the module file to start with `/usr/bin/ruby` to allow for shebang line substitution. If no Ruby interpreter is available at that path, you can configure it through `ansible_ruby_interpreter`, which is set per-host as an inventory variable associated with a host or group of hosts (e.g. `ansible_ruby_interpreter=/opt/kpm-0.5.2-linux-x86_64/lib/ruby/bin/ruby` in your host file). 
+ + +# Testing Playbooks + +In order to test, one can create an inventory: + +## Locally + +``` +# File localhost/inventory +[server] +127.0.0.1 ansible_user=sbrossier +``` + +``` +> ansible-playbook -v -i localhost/inventory -e java_home=/Library/Java/JavaVirtualMachines/jdk1.8.0_171.jdk/Contents/Home -u sbrossier the_playbook.yml +``` + +## EC2 + +``` +# File ec2/inventory +[server] +ec2-18-233-67-208.compute-1.amazonaws.com ansible_user=ubuntu +``` + +``` +> ansible-playbook -v -i ec2/inventory -e java_home=/Library/Java/JavaVirtualMachines/jdk1.8.0_171.jdk/Contents/Home -u ubuntu the_playbook.yml +``` + + + + diff --git a/ansible/diagnostic.yml b/ansible/diagnostic.yml new file mode 100644 index 00000000..5e598cf8 --- /dev/null +++ b/ansible/diagnostic.yml @@ -0,0 +1,21 @@ +--- +- name: Gather diagnostics + hosts: all + tasks: + - name: setup Ruby + import_tasks: roles/common/tasks/main.yml + - name: setup KPM + import_tasks: roles/kpm/tasks/main.yml + - name: gather diagnostics + killbill_diagnostics: + kpm_path: "{{ kpm_path }}" + killbill_url: "{{ killbill_url|default('http://127.0.0.1:8080') }}" + killbill_web_path: "{{ catalina_base }}/{{ kb_webapps }}/ROOT.war" + killbill_user: "{{ killbill_user|default('admin') }}" + killbill_password: "{{ killbill_password|default('password') }}" + killbill_api_key: "{{ killbill_api_key|default('bob') }}" + killbill_api_secret: "{{ killbill_api_secret|default('lazar') }}" + killbill_account: "{{ killbill_account|default('') }}" + bundles_dir: "{{ kb_plugins_dir }}" + log_dir: "{{ catalina_base }}/logs" + tags: diagnostics \ No newline at end of file diff --git a/ansible/flyway.yml b/ansible/flyway.yml index 78fd7dfa..97c73aba 100644 --- a/ansible/flyway.yml +++ b/ansible/flyway.yml @@ -1,7 +1,10 @@ --- -- name: Flyway migrations +- name: Install Flyway hosts: all - roles: - - common - - kpm - - migrations + tasks: + - name: setup Ruby + import_tasks: roles/common/tasks/main.yml + - name: setup KPM + import_tasks: 
roles/kpm/tasks/main.yml + - name: install Flyway + import_tasks: roles/migrations/tasks/flyway.yml diff --git a/ansible/group_vars/all.yml b/ansible/group_vars/all.yml index 18f76fd8..39965f3a 100644 --- a/ansible/group_vars/all.yml +++ b/ansible/group_vars/all.yml @@ -3,57 +3,45 @@ nexus_url: https://oss.sonatype.org nexus_repository: releases kpm_install_dir: /opt -kpm_version: 0.6.5 +kpm_version: 0.8.1 kpm_path: "{{ kpm_install_dir }}/kpm-{{ kpm_version }}-linux-{{ ansible_architecture }}" catalina_home: /usr/share/tomcat -catalina_base: /opt/apache-tomcat -tomcat_owner: "{{ ansible_user_id }}" -tomcat_group: root +catalina_base: /var/lib/tomcat +tomcat_owner: tomcat +tomcat_group: tomcat + +# Webapps directory : {{ catalina_base }}/{{ kb_webapps }} +kb_webapps: webapps +kaui_webapps: webapps tomcat_foreground: false # Base directory for namespacing -kb_install_dir: /opt/killbill +kb_install_dir: /var/lib/killbill # Configuration files (killbill.properties, JRuby files, etc.) kb_config_dir: "{{ kb_install_dir }}/config" # Kill Bill plugins and OSGI bundles kb_plugins_dir: "{{ kb_install_dir }}/bundles" -# kpm.yml -- see https://github.com/killbill/killbill-cloud/tree/master/kpm -kpm_yml: - killbill: - version: 0.18.10 - webapp_path: "{{ catalina_base }}/webapps/ROOT.war" - nexus: - ssl_verify: false - url: "{{ nexus_url }}" - repository: "{{ nexus_repository }}" - plugins_dir: "{{ kb_plugins_dir }}" - plugins: - java: - - name: analytics - ruby: - - name: kpm - -# Kaui specific properties -killbill_url: http://127.0.0.1:8080 -killbill_api_key: bob -killbill_api_secret: lazar -kaui_db_adapter: mysql2 +# kpm.yml file +kpm_yml: "{{ kb_install_dir }}/kpm.yml" + +flyway_install_dir: /opt +flyway: java -jar {{ flyway_install_dir }}/killbill-flyway.jar -url='{{ lookup('env','KILLBILL_DAO_URL') }}' -user={{ lookup('env','KILLBILL_DAO_USER') }} -password={{ lookup('env','KILLBILL_DAO_PASSWORD') }} # Extra JVM properties (e.g. 
-Dlogback.configurationFile={{ kb_config_dir }}/logback.xml) kaui_system_properties: '' kb_system_properties: '' # Tomcat and JVM properties -- recommended defaults -jvm_initial_memory: 4G -jvm_max_memory: 4G +jvm_initial_memory: 512m +jvm_max_memory: 2G jvm_jdwp_port: 12345 jvm_jdwp_server: y jvm_cms_initiating_fraction_threshold: 50 -jvm_new_size: 600m -jvm_max_new_size: 1900m +jvm_new_size: 256m +jvm_max_new_size: 1024m jvm_survivor_ratio: 10 jvm_jmx_port: 8000 tomcat_port: 8080 -tomcat_max_threads: 150 +tomcat_max_threads: 100 diff --git a/ansible/kaui.yml b/ansible/kaui.yml new file mode 100644 index 00000000..2da45864 --- /dev/null +++ b/ansible/kaui.yml @@ -0,0 +1,12 @@ +--- +- name: Deploy Kaui + hosts: all + vars: + kpm_yml: /var/lib/kaui/kpm.yml + tasks: + - name: setup Ruby + import_tasks: roles/common/tasks/main.yml + - name: setup Tomcat + import_tasks: roles/tomcat/tasks/main.yml + - name: install Kaui + import_tasks: roles/kaui/tasks/main.yml diff --git a/ansible/killbill.yml b/ansible/killbill.yml index 97b9622c..5999721b 100644 --- a/ansible/killbill.yml +++ b/ansible/killbill.yml @@ -1,16 +1,6 @@ --- - name: Deploy Kill Bill hosts: all - vars: - catalina_base: /var/lib/tomcat - tomcat_owner: tomcat - tomcat_group: tomcat - kpm_install_dir: /opt - kpm_version: 0.6.2 - kpm_path: "{{ kpm_install_dir }}/kpm-{{ kpm_version }}-linux-{{ ansible_architecture }}" - kpm_yml: /var/lib/killbill/kpm.yml - kb_config_dir: /var/lib/killbill - kb_plugins_dir: /var/lib/killbill/bundles tasks: - name: setup Ruby import_tasks: roles/common/tasks/main.yml diff --git a/ansible/kpm.yml b/ansible/kpm.yml index 6a841e52..fb474e1a 100644 --- a/ansible/kpm.yml +++ b/ansible/kpm.yml @@ -1,12 +1,6 @@ --- - name: Deploy KPM hosts: all - vars: - kpm_install_dir: /opt - nexus_url: https://oss.sonatype.org - nexus_repository: releases - kpm_version: 0.6.2 - kpm_path: "{{ kpm_install_dir }}/kpm-{{ kpm_version }}-linux-{{ ansible_architecture }}" become: yes tasks: - name: setup 
diff --git a/ansible/library/killbill_diagnostics b/ansible/library/killbill_diagnostics new file mode 100755 index 00000000..54005019 --- /dev/null +++ b/ansible/library/killbill_diagnostics @@ -0,0 +1,50 @@ +#!/usr/bin/ruby +# WANT_JSON + +require 'json' +require 'logger' +require 'pathname' +require 'set' +require 'uri' +require 'yaml' + +data = {} +File.open(ARGV[0]) do |fh| + data = JSON.parse(fh.read()) +end + +unless data['kpm_path'].nil? + ENV['GEM_PATH']="#{data['kpm_path']}/lib/vendor/ruby/2.2.0" + Gem.clear_paths +end +require 'kpm' +require 'kpm/version' + +logger = Logger.new(STDOUT) +logger.level = Logger::INFO + +killbill_api_credentials = [data['killbill_api_key'], data['killbill_api_secret']] +killbill_credentials = [data['killbill_user'], data['killbill_password']] + +diagnostic = KPM::DiagnosticFile.new(nil, + killbill_api_credentials, + killbill_credentials, + data['killbill_url'], + nil, + nil, + nil, + nil, + data['kaui_web_path'], + data['killbill_web_path'], + data['bundles_dir'], + logger) + +killbill_account = data['killbill_account'].empty? ? 
nil : data['killbill_account'] +diagnostic_file = diagnostic.export_data(killbill_account, data['log_dir']) + +result = { + 'changed' => true, + 'diagnostic_file' => diagnostic_file +} + +print JSON.dump(result) diff --git a/ansible/library/killbill_facts b/ansible/library/killbill_facts index 4fd3fd31..2e010a07 100755 --- a/ansible/library/killbill_facts +++ b/ansible/library/killbill_facts @@ -1,8 +1,11 @@ #!/usr/bin/ruby # WANT_JSON -require 'pathname' require 'json' +require 'logger' +require 'pathname' +require 'set' +require 'yaml' data = {} File.open(ARGV[0]) do |fh| @@ -16,16 +19,23 @@ end require 'kpm' require 'kpm/version' -kpm_facts = KPM::System.new.information(data['bundles_dir'], - true, - data['config_file'], - data['kaui_web_path'], - data['killbill_web_path']) +logger = Logger.new(STDOUT) +logger.level = Logger::INFO + +kpm_facts = KPM::System.new(logger).information(data['bundles_dir'], + true, + data['config_file'], + data['kaui_web_path'], + data['killbill_web_path']) +versions_info = KPM::KillbillServerArtifact.info(data['version'], + data['overrides'], + data['ssl_verify']) result = { 'changed' => false, 'ansible_facts' => { - 'kpm' => JSON.parse(kpm_facts) + 'kpm' => JSON.parse(kpm_facts), + 'versions_info' => versions_info } } diff --git a/ansible/library/killbill_migrations b/ansible/library/killbill_migrations index b72abf91..c26fa1cf 100755 --- a/ansible/library/killbill_migrations +++ b/ansible/library/killbill_migrations @@ -2,19 +2,10 @@ # WANT_JSON require 'json' +require 'logger' require 'pathname' require 'set' - -# Temporary -- https://github.com/killbill/killbill-cloud/issues/78 -def with_captured_stdout - require 'stringio' - old_stdout = $stdout - $stdout = StringIO.new - yield - JSON.parse($stdout.string.split("\n")[-1]) -ensure - $stdout = old_stdout -end +require 'yaml' data = {} File.open(ARGV[0]) do |fh| @@ -28,12 +19,21 @@ end require 'kpm' require 'kpm/version' -kpm_facts = with_captured_stdout { 
KPM::System.new.information(data['bundles_dir'], - true, - data['config_file'], - data['kaui_web_path'], - data['killbill_web_path']) } +logger = Logger.new(STDOUT) +logger.level = Logger::INFO + +kpm_facts = KPM::System.new(logger).information(data['bundles_dir'], + true, + data['config_file'], + data['kaui_web_path'], + data['killbill_web_path']) +kpm_facts = JSON.parse(kpm_facts) + +kpm_yml = data['kpm_yml'] +if kpm_yml.is_a?(String) + kpm_yml = YAML::load_file(data['kpm_yml']) +end errors = [] all_migrations = { @@ -44,6 +44,36 @@ all_migrations = { } } +# Core migrations +kb_from_version = kpm_facts['killbill_information']['killbill']['version'] +kb_to_version = kpm_yml['killbill']['version'] +all_migrations[:from] = kb_from_version +all_migrations[:to] = kb_to_version + +if kb_from_version == 'not found' + print JSON.dump({ + 'failed' => true, + 'msg' => 'Unable to retrieve Kill Bill version' + }) + exit(1) +else + kb_from_tag = "killbill-#{kb_from_version}" + + if kb_to_version.nil? || kb_to_version == 'LATEST' + kb_to_version = KPM::Installer.get_kb_latest_stable_version + end + kb_to_tag = "killbill-#{kb_to_version}" + + migrations_dir = KPM::Migrations.new(kb_from_tag, kb_to_tag, "killbill/killbill", data['gh_token'], logger).save + + all_migrations[:killbill] = { + :from_tag => kb_from_tag, + :to_tag => kb_to_tag, + :table => 'schema_version', + :dir => migrations_dir + } unless migrations_dir.nil? +end + # Plugins migrations kpm_facts['plugin_information'].each do |plugin_name, plugin_details| from_version_details = plugin_details['versions'].find { |v| v['is_default'] } @@ -51,28 +81,43 @@ kpm_facts['plugin_information'].each do |plugin_name, plugin_details| errors << "#{plugin_name} disabled" next end + from_version = from_version_details['version'] + if from_version.nil? 
+ errors << "Unable to retrieve current version for #{plugin_name}" + next + end - to_version_details = data['kpm_yml']['killbill']['plugins'][plugin_details['type']].find { |p| p['name'] == plugin_details['plugin_key'] } + to_version_details = kpm_yml['killbill']['plugins'][plugin_details['type']].find { |p| p['name'] == plugin_details['plugin_key'] } if to_version_details.nil? - # TODO Should we remove it? Upgrade it nonetheless? - errors << "#{plugin_name} not scheduled to be installed" + errors << "#{plugin_name} is not scheduled to be upgraded" next end - - from_version = from_version_details['version'] to_version = to_version_details['version'] + if to_version.nil? + _, _, _, _, to_version, _ = KPM::PluginsDirectory.lookup(to_version_details['name'], true, kb_to_version) + if to_version.nil? || to_version == 'LATEST' + errors << "#{plugin_name} isn't availble for upgrade" + next + end + end is_ruby = plugin_details['type'] == 'ruby' if is_ruby from_tag = "v#{from_version}" - to_tag = to_version.nil? ? nil : "v#{to_version}" + to_tag = "v#{to_version}" else from_tag = "#{plugin_details['artifact_id']}-#{from_version}" - to_tag = to_version.nil? ? nil : "#{plugin_details['artifact_id']}-#{to_version}" + to_tag = "#{plugin_details['artifact_id']}-#{to_version}" end repository = "killbill-#{plugin_details['artifact_id']}" - migrations_dir = KPM::Migrations.new(from_tag, to_tag, "killbill/#{repository}", data['gh_token']).save + begin + migrations_dir = KPM::Migrations.new(from_tag, to_tag, "killbill/#{repository}", data['gh_token'], logger).save + rescue OpenURI::HTTPError => e + errors << "#{plugin_name} isn't availble for upgrade" + next + end + next if migrations_dir.nil? 
if is_ruby @@ -92,26 +137,6 @@ kpm_facts['plugin_information'].each do |plugin_name, plugin_details| end end -# Core migrations -kb_from_version = kpm_facts['killbill_information']['killbill']['version'] -if kb_from_version == 'not found' - errors << "killbill version not found" -else - kb_from_tag = "killbill-#{kb_from_version}" - - kb_to_version = data['kpm_yml']['killbill']['version'] - kb_to_tag = kb_to_version.nil? || kb_to_version == 'LATEST' ? nil : "killbill-#{kb_to_version}" - - migrations_dir = KPM::Migrations.new(kb_from_tag, kb_to_tag, "killbill/killbill", data['gh_token']).save - - all_migrations[:killbill] = { - :from_tag => kb_from_tag, - :to_tag => kb_to_tag, - :table => 'schema_version', - :dir => migrations_dir - } unless migrations_dir.nil? -end - result = { 'changed' => !(all_migrations[:killbill].empty? && all_migrations[:plugins][:ruby].empty? && all_migrations[:plugins][:java].empty?), 'errors' => errors, diff --git a/ansible/migrations.yml b/ansible/migrations.yml new file mode 100644 index 00000000..5df55f3d --- /dev/null +++ b/ansible/migrations.yml @@ -0,0 +1,7 @@ +--- +- name: Database migrations + hosts: all + roles: + - common + - kpm + - migrations diff --git a/ansible/roles/kaui/tasks/main.yml b/ansible/roles/kaui/tasks/main.yml index 574b5d38..f5a70074 100644 --- a/ansible/roles/kaui/tasks/main.yml +++ b/ansible/roles/kaui/tasks/main.yml @@ -1,12 +1,4 @@ --- -- name: ensure Kill Bill dirs exist - become: true - file: path={{ item }} state=directory owner={{ tomcat_owner }} group={{ tomcat_group }} - with_items: - - "{{ kb_install_dir }}" - - "{{ kb_config_dir }}" - tags: kpm-install - - name: generate Tomcat files become: true template: @@ -28,3 +20,50 @@ kpm_path: "{{ kpm_path }}" kpm_yml: "{{ kpm_yml }}" tags: kpm-install + +- name: Create ROOT directory to unarchive the WAR + become: true + file: + path: "{{ catalina_base }}/{{ kaui_webapps }}/ROOT" + state: directory + mode: '0755' + +- name: Expand WAR file if not already done 
+ become: true + command: | + "{{ java_home }}/bin/jar" -xf ../ROOT.war + args: + chdir: "{{ catalina_base }}/{{ kaui_webapps }}/ROOT" + creates: "{{ catalina_base }}/{{ kaui_webapps }}/ROOT/WEB-INF" + tags: kaui-logback + +- name: Copy logback.xml + become: true + template: + src: "kaui/{{ item.name }}.j2" + dest: "{{ catalina_base }}/{{ kaui_webapps }}/ROOT/WEB-INF/classes/logback.xml" + mode: u=rw,g=r,o=r + owner: "{{ tomcat_owner }}" + group: "{{ tomcat_group }}" + force: true + with_items: + - name: logback.xml + tags: kaui-logback + +- name: Recursively change ownership for the expanded ROOT + become: true + file: + path: "{{ catalina_base }}/{{ kaui_webapps }}/ROOT" + state: directory + recurse: yes + owner: "{{ tomcat_owner }}" + group: "{{ tomcat_group }}" + mode: u=rwX,g=rX,o=rX + tags: kaui-logback + +- name: Remove root archive + become: true + file: + path: "{{ catalina_base }}/{{ kaui_webapps }}/ROOT.war" + state: absent + tags: kaui-logback diff --git a/ansible/roles/killbill/tasks/main.yml b/ansible/roles/killbill/tasks/main.yml index b8b51553..cad76e2e 100644 --- a/ansible/roles/killbill/tasks/main.yml +++ b/ansible/roles/killbill/tasks/main.yml @@ -18,7 +18,6 @@ force: no with_items: - name: killbill.properties - - name: logback.xml tags: kpm-install - name: generate Tomcat files @@ -43,3 +42,50 @@ kpm_path: "{{ kpm_path }}" kpm_yml: "{{ kpm_yml }}" tags: kpm-install + +- name: Create ROOT directory to unarchive the WAR + become: true + file: + path: "{{ catalina_base }}/{{ kb_webapps }}/ROOT" + state: directory + mode: '0755' + +- name: Expand WAR file if not already done + become: true + command: | + "{{ java_home }}/bin/jar" -xf ../ROOT.war + args: + chdir: "{{ catalina_base }}/{{ kb_webapps }}/ROOT" + creates: "{{ catalina_base }}/{{ kb_webapps }}/ROOT/WEB-INF" + tags: killbill-logback + +- name: Copy logback.xml + become: true + template: + src: "killbill/{{ item.name }}.j2" + dest: "{{ catalina_base }}/{{ kb_webapps 
}}/ROOT/WEB-INF/classes/logback.xml" + mode: u=rw,g=r,o=r + owner: "{{ tomcat_owner }}" + group: "{{ tomcat_group }}" + force: true + with_items: + - name: logback.xml + tags: killbill-logback + +- name: Recursively change ownership for the expanded ROOT + become: true + file: + path: "{{ catalina_base }}/{{ kb_webapps }}/ROOT" + state: directory + recurse: yes + owner: "{{ tomcat_owner }}" + group: "{{ tomcat_group }}" + mode: u=rwX,g=rX,o=rX + tags: killbill-logback + +- name: Remove root archive + become: true + file: + path: "{{ catalina_base }}/{{ kb_webapps }}/ROOT.war" + state: absent + tags: killbill-logback diff --git a/ansible/roles/kpm/tasks/main.yml b/ansible/roles/kpm/tasks/main.yml index ba7c9f22..3e799982 100644 --- a/ansible/roles/kpm/tasks/main.yml +++ b/ansible/roles/kpm/tasks/main.yml @@ -6,11 +6,16 @@ state: directory tags: kpm +- name: check if KPM is already installed + stat: + path: "{{ kpm_path }}/kpm" + register: kpm_bin + - name: install KPM become: true unarchive: src: "{{ nexus_url }}/content/repositories/{{ nexus_repository }}/org/kill-bill/billing/installer/kpm/{{ kpm_version }}/kpm-{{ kpm_version }}-linux-{{ ansible_architecture }}.tar.gz" remote_src: True dest: "{{ kpm_install_dir }}" + when: kpm_bin.stat.exists == False tags: kpm - diff --git a/ansible/roles/migrations/tasks/flyway.yml b/ansible/roles/migrations/tasks/flyway.yml new file mode 100644 index 00000000..a8849e54 --- /dev/null +++ b/ansible/roles/migrations/tasks/flyway.yml @@ -0,0 +1,39 @@ +--- +- name: ensure Flyway install dir exists + become: true + file: path={{ flyway_install_dir }} state=directory owner={{ tomcat_owner }} group={{ tomcat_group }} + tags: migrations + +# Note: we don't check the version but the binary is rarely updated +- name: check if Flyway is already installed + stat: + path: "{{ flyway_install_dir }}/killbill-flyway.jar" + register: flyway_bin + tags: migrations + +- block: + - name: resolve LATEST version + killbill_facts: + kpm_path: "{{ 
kpm_path }}" + bundles_dir: "{{ kb_plugins_dir }}" + kaui_web_path: "{{ catalina_base }}/{{ kaui_webapps }}/ROOT.war" + killbill_web_path: "{{ catalina_base }}/{{ kb_webapps }}/ROOT.war" + version: LATEST + register: kb_facts + when: flyway_version is undefined + tags: migrations + + - name: set flyway_version + set_fact: + flyway_version: "{{ kb_facts['ansible_facts']['versions_info']['killbill'] }}" + when: flyway_version is undefined + tags: migrations + + - name: install Flyway + # maven_artifact module requires xml on the host + get_url: + url: "{{ nexus_url }}/content/repositories/{{ nexus_repository }}/org/kill-bill/billing/killbill-util/{{ flyway_version }}/killbill-util-{{ flyway_version }}-flyway.jar" + dest: "{{ flyway_install_dir }}/killbill-flyway.jar" + tags: migrations + when: flyway_bin.stat.exists == False + tags: migrations diff --git a/ansible/roles/migrations/tasks/main.yml b/ansible/roles/migrations/tasks/main.yml index 53a36eea..48bc5f59 100644 --- a/ansible/roles/migrations/tasks/main.yml +++ b/ansible/roles/migrations/tasks/main.yml @@ -3,59 +3,51 @@ killbill_migrations: kpm_path: "{{ kpm_path }}" bundles_dir: "{{ kb_plugins_dir }}" - kaui_web_path: "{{ catalina_base }}/webapps/ROOT.war" - killbill_web_path: "{{ catalina_base }}/webapps/ROOT.war" + kaui_web_path: "{{ catalina_base }}/{{ kaui_webapps }}/ROOT.war" + killbill_web_path: "{{ catalina_base }}/{{ kb_webapps }}/ROOT.war" kpm_yml: "{{ kpm_yml }}" - gh_token: "{{ gh_token }}" + gh_token: "{{ gh_token|default('') }}" register: migrations tags: migrations -- name: install Flyway - # maven_artifact module requires xml on the host - get_url: - url: "{{ nexus_url }}/content/repositories/{{ nexus_repository }}/org/kill-bill/billing/killbill-util/0.18.11/killbill-util-0.18.11-flyway.jar" - dest: "{{ kb_install_dir }}/killbill-flyway.jar" - tags: [migrations, java_migrations] +- include_tasks: flyway.yml + vars: + flyway_version: "{{ migrations['migrations']['from'] }}" -- name: generate SQL 
migrations for Kill Bill and Java plugins - command: java -jar {{ kb_install_dir }}/killbill-flyway.jar -locations=filesystem:{{ item['dir'] }} -table={{ item['table'] }} -url='{{ database_url }}' -user={{ database_user }} -password={{ database_password }} dryRunMigrate +- name: generate Flyway baseline tables + command: "{{ flyway }} -locations=filesystem:{{ item['dir'] }} -table={{ item['table'] }} baseline" with_items: - "{{ migrations['migrations']['killbill'] }}" - "{{ migrations['migrations']['plugins']['java'] }}" when: item['dir'] is defined - register: java_dry_run_migrations - tags: [migrations, java_migrations] - -- name: SQL migrations for Kill Bill and Java plugins - debug: var=item.stdout_lines - with_items: "{{ java_dry_run_migrations.results }}" - when: item.stdout_lines is defined - tags: [migrations, java_migrations] - -- block: - - name: wait for confirmation for Kill Bill and Java plugins - pause: prompt='Should I run these migrations? Press return to continue. Press Ctrl+c and then "a" to abort' - tags: [migrations, java_migrations] + register: baselineout + failed_when: baselineout.rc != 0 and 'as it already contains migrations' not in baselineout.stderr + changed_when: "'already initialized with' not in baselineout.stdout and 'as it already contains migrations' not in baselineout.stderr" + tags: migrations - - name: run migrations for Kill Bill and Java plugins - command: java -jar {{ kb_install_dir }}/killbill-flyway.jar -locations=filesystem:{{ item['dir'] }} -table={{ item['table'] }} -url='{{ database_url }}' -user={{ database_user }} -password={{ database_password }} migrate - with_items: - - "{{ migrations['migrations']['killbill'] }}" - - "{{ migrations['migrations']['plugins']['java'] }}" - when: item['dir'] is defined - register: java_migrations - tags: [migrations, java_migrations] +# We verify that all migrations can be generated before attempting to run them one by one +- name: validate SQL migrations for Kill Bill and Java 
plugins + command: "{{ flyway }} -locations=filesystem:{{ item['dir'] }} -table={{ item['table'] }} dryRunMigrate" + with_items: + - "{{ migrations['migrations']['killbill'] }}" + - "{{ migrations['migrations']['plugins']['java'] }}" + when: item['dir'] is defined + changed_when: False + register: java_dry_run_migrations + tags: migrations - - name: Flyway migrations output for Kill Bill and Java plugins - debug: var=item.stdout_lines - with_items: "{{ java_migrations.results }}" - when: item.stdout_lines is defined - tags: [migrations, java_migrations] +# Run core migrations +- include_tasks: migrate.yml + loop: + - "{{ migrations['migrations']['killbill'] }}" + loop_control: + loop_var: migration + when: migration['dir'] is defined - when: java_dry_run_migrations.stdout.find("BEGIN;\nCOMMIT;") != -1 +# Run plugin migrations +- include_tasks: migrate.yml + loop: "{{ migrations['migrations']['plugins']['java']|flatten(levels=1) }}" + loop_control: + loop_var: migration + when: migration['dir'] is defined -# TODO -# - name: generate SQL migrations for Ruby plugins -# local_action: killbill-migration -# with_items: "{{ migrations['migrations']['plugins']['ruby'] }}" -# tags: [migrations, ruby_migrations] diff --git a/ansible/roles/migrations/tasks/migrate.yml b/ansible/roles/migrations/tasks/migrate.yml new file mode 100644 index 00000000..b0013c64 --- /dev/null +++ b/ansible/roles/migrations/tasks/migrate.yml @@ -0,0 +1,47 @@ +- name: generate SQL migration + command: "{{ flyway }} -locations=filesystem:{{ migration['dir'] }} -table={{ migration['table'] }} dryRunMigrate" + register: java_dry_run_migrations + changed_when: java_dry_run_migrations.stdout_lines + tags: migrations + +- debug: msg="No migration to run for {{ migration['from_tag'] }} -> {{ migration['to_tag'] }}" + when: java_dry_run_migrations.stdout.find("BEGIN;\nCOMMIT;") != -1 + tags: migrations + +- block: + - debug: msg="{{ java_dry_run_migrations.stdout_lines }}" + when: 
java_dry_run_migrations.stdout_lines + tags: migrations + + - name: prompt for SQL migration + pause: prompt='Should I run these migrations? Enter yes or no' + register: should_continue + when: java_dry_run_migrations.stdout_lines is defined + tags: migrations + + - block: + - name: run SQL migration + command: "{{ flyway }} -locations=filesystem:{{ migration['dir'] }} -table={{ migration['table'] }} -validateOnMigrate=false migrate" + register: java_migrations + ignore_errors: True + tags: migrations + + - debug: msg="{{ java_migrations.stdout_lines }}" + when: java_migrations.stdout_lines + tags: migrations + + - debug: msg="{{ java_migrations.stderr_lines }}" + when: java_migrations.stderr_lines + tags: migrations + + - name: fail the play if the schema_version table is corrupted + fail: msg="schema_version corrupted. Try running {{ flyway }} -locations=filesystem:{{ migration['dir'] }} -table={{ migration['table'] }} repair" + when: java_migrations.rc != 0 and 'contains a failed migration' in java_migrations.stderr + tags: migrations + + - name: fail the play if the migrations did not succeed + fail: msg="Migrations failed. You need to fix the tables, adjust the schema_version table manually (i.e. 
insert a line for that migration or update the status to success) and run {{ flyway }} -locations=filesystem:{{ migration['dir'] }} -table={{ migration['table'] }} repair" + when: java_migrations.rc != 0 and 'contains a failed migration' not in java_migrations.stderr + tags: migrations + when: should_continue.user_input | bool + when: java_dry_run_migrations.stdout.find("BEGIN;\nCOMMIT;") == -1 diff --git a/ansible/roles/tomcat/tasks/main.yml b/ansible/roles/tomcat/tasks/main.yml index d8c4a73e..cce5dcbd 100644 --- a/ansible/roles/tomcat/tasks/main.yml +++ b/ansible/roles/tomcat/tasks/main.yml @@ -7,7 +7,8 @@ - conf - lib - logs - - webapps + - "{{ kb_webapps }}" + - "{{ kaui_webapps }}" - work - temp tags: tomcat diff --git a/ansible/roles/tomcat/tasks/restart.yml b/ansible/roles/tomcat/tasks/restart.yml index f8e5b7eb..6b02c2c8 100644 --- a/ansible/roles/tomcat/tasks/restart.yml +++ b/ansible/roles/tomcat/tasks/restart.yml @@ -1,42 +1,5 @@ --- -- name: check Tomcat PID file - stat: path="{{ catalina_base }}/tomcat.pid" - register: tomcat_pid - when: - - catalina_home is defined - -- name: stop Tomcat (with PID file) - become: true - become_user: "{{ tomcat_owner }}" - environment: - CATALINA_BASE: "{{ catalina_base }}" - CATALINA_PID: "{{ catalina_base }}/tomcat.pid" - command: "{{ catalina_home }}/bin/catalina.sh stop 30 -force" - ignore_errors: True - when: - - catalina_home is defined - - tomcat_pid is not defined or tomcat_pid.stat.exists == True - -- name: stop Tomcat (without PID file) - become: true - become_user: "{{ tomcat_owner }}" - environment: - CATALINA_BASE: "{{ catalina_base }}" - command: "{{ catalina_home }}/bin/catalina.sh stop 30 -force" - ignore_errors: True - when: - - catalina_home is defined - - tomcat_pid is not defined or tomcat_pid.stat.exists == False - -- name: clean up Tomcat deployment files - become: true - become_user: "{{ tomcat_owner }}" - file: path={{ catalina_base }}/{{ item }} state=absent - with_items: - - webapps/ROOT - - 
work/ - when: - - catalina_home is defined +- include: stop.yml - name: start Tomcat become: true diff --git a/ansible/roles/tomcat/tasks/stop.yml b/ansible/roles/tomcat/tasks/stop.yml new file mode 100644 index 00000000..78856082 --- /dev/null +++ b/ansible/roles/tomcat/tasks/stop.yml @@ -0,0 +1,41 @@ +--- +- name: check Tomcat PID file + stat: path="{{ catalina_base }}/tomcat.pid" + register: tomcat_pid + when: + - catalina_home is defined + +- name: stop Tomcat (with PID file) + become: true + become_user: "{{ tomcat_owner }}" + environment: + CATALINA_BASE: "{{ catalina_base }}" + CATALINA_PID: "{{ catalina_base }}/tomcat.pid" + command: "{{ catalina_home }}/bin/catalina.sh stop 30 -force" + ignore_errors: True + when: + - catalina_home is defined + - tomcat_pid is not defined or tomcat_pid.stat.exists == True + +- name: stop Tomcat (without PID file) + become: true + become_user: "{{ tomcat_owner }}" + environment: + CATALINA_BASE: "{{ catalina_base }}" + command: "{{ catalina_home }}/bin/catalina.sh stop 30 -force" + ignore_errors: True + when: + - catalina_home is defined + - tomcat_pid is not defined or tomcat_pid.stat.exists == False + +- name: clean up Tomcat deployment files + become: true + become_user: "{{ tomcat_owner }}" + file: path={{ catalina_base }}/{{ item }} state=absent + with_items: + - "{{ kb_webapps }}/ROOT" + - work/ + when: + - catalina_home is defined + tags: + - tomcat_cleanup diff --git a/ansible/templates/kaui/conf/setenv2.sh.j2 b/ansible/templates/kaui/conf/setenv2.sh.j2 index cbf6f9e6..749227cd 100644 --- a/ansible/templates/kaui/conf/setenv2.sh.j2 +++ b/ansible/templates/kaui/conf/setenv2.sh.j2 @@ -1,10 +1,24 @@ # Java Properties export CATALINA_OPTS="$CATALINA_OPTS - -Dkaui.db.adapter={{ kaui_db_adapter }} - -Dkaui.url={{ killbill_url }} - -Dkaui.api_key={{ killbill_api_key }} - -Dkaui.api_secret={{ killbill_api_secret }} - -Dkaui.db.url={{ database_url }} - -Dkaui.db.password={{ database_user }} - -Dkaui.db.username={{ 
database_password }} +{% if lookup('env', 'KAUI_CONFIG_DAO_ADAPTER') %} + -Dkaui.db.adapter={{ lookup('env', 'KAUI_CONFIG_DAO_ADAPTER') }} +{% endif %} +{% if lookup('env', 'KAUI_KILLBILL_URL') %} + -Dkaui.url={{ lookup('env', 'KAUI_KILLBILL_URL') }} +{% endif %} +{% if lookup('env', 'KAUI_KILLBILL_API_KEY') %} + -Dkaui.api_key={{ lookup('env', 'KAUI_KILLBILL_API_KEY') }} +{% endif %} +{% if lookup('env', 'KAUI_KILLBILL_API_SECRET') %} + -Dkaui.api_secret={{ lookup('env', 'KAUI_KILLBILL_API_SECRET') }} +{% endif %} +{% if lookup('env', 'KAUI_CONFIG_DAO_URL') %} + -Dkaui.db.url={{ lookup('env', 'KAUI_CONFIG_DAO_URL') }} +{% endif %} +{% if lookup('env', 'KAUI_CONFIG_DAO_PASSWORD') %} + -Dkaui.db.password={{ lookup('env', 'KAUI_CONFIG_DAO_PASSWORD') }} +{% endif %} +{% if lookup('env', 'KAUI_CONFIG_DAO_USER') %} + -Dkaui.db.username={{ lookup('env', 'KAUI_CONFIG_DAO_USER') }} +{% endif %} {{ kaui_system_properties }}" diff --git a/ansible/templates/kaui/logback.xml.j2 b/ansible/templates/kaui/logback.xml.j2 new file mode 100644 index 00000000..5f0621ec --- /dev/null +++ b/ansible/templates/kaui/logback.xml.j2 @@ -0,0 +1,39 @@ + + + + + + ${LOGS_DIR:-./logs}/kaui.out + + + ${LOGS_DIR:-./logs}/kaui-%d{yyyy-MM-dd}.%i.out.gz + 3 + true + + + 100MB + + + + %date{"yyyy-MM-dd'T'HH:mm:ss,SSSZ", UTC} lvl='%level', log='%X{rails.actionName}', th='%thread', xff='%X{req.xForwardedFor}', rId='%X{req.requestId}', aId='%X{kb.accountId}', tId='%X{kb.tenantId}', %msg%n + + + + + + + diff --git a/ansible/templates/killbill/conf/setenv2.sh.j2 b/ansible/templates/killbill/conf/setenv2.sh.j2 index 25759f3e..9319a656 100644 --- a/ansible/templates/killbill/conf/setenv2.sh.j2 +++ b/ansible/templates/killbill/conf/setenv2.sh.j2 @@ -5,6 +5,5 @@ export CATALINA_OPTS="$CATALINA_OPTS {% else %} -Dorg.killbill.queue.creator.name={{ inventory_hostname }} {% endif %} - -Dlogback.configurationFile={{ kb_config_dir }}/logback.xml -Dorg.killbill.server.properties=file://{{ kb_config_dir 
}}/killbill.properties {{ kb_system_properties }}" diff --git a/ansible/templates/killbill/killbill.properties.j2 b/ansible/templates/killbill/killbill.properties.j2 index 34cd6771..5f0be2aa 100644 --- a/ansible/templates/killbill/killbill.properties.j2 +++ b/ansible/templates/killbill/killbill.properties.j2 @@ -48,7 +48,8 @@ org.killbill.billing.osgi.dao.mysqlServerVersion={{ lookup('env', 'KILLBILL_DAO_ {% endif %} {% if lookup('env', 'KILLBILL_OSGI_DAO_PASSWORD') %} org.killbill.billing.osgi.dao.password={{ lookup('env', 'KILLBILL_OSGI_DAO_PASSWORD') }} -{% elif lookup('env', 'KILLBILL_DAO_PASSWORD') %} +{% else %} +{# Allow for blank passwords (SSL certificates usecase) #} org.killbill.billing.osgi.dao.password={{ lookup('env', 'KILLBILL_DAO_PASSWORD') }} {% endif %} {% if lookup('env', 'KILLBILL_OSGI_DAO_POOLING_TYPE') %} @@ -132,9 +133,8 @@ org.killbill.dao.minIdle={{ lookup('env', 'KILLBILL_DAO_MIN_IDLE') }} {% if lookup('env', 'KILLBILL_DAO_MYSQL_SERVER_VERSION') %} org.killbill.dao.mysqlServerVersion={{ lookup('env', 'KILLBILL_DAO_MYSQL_SERVER_VERSION') }} {% endif %} -{% if lookup('env', 'KILLBILL_DAO_PASSWORD') %} +{# Allow for blank passwords (SSL certificates usecase) #} org.killbill.dao.password={{ lookup('env', 'KILLBILL_DAO_PASSWORD') }} -{% endif %} {% if lookup('env', 'KILLBILL_DAO_POOLING_TYPE') %} org.killbill.dao.poolingType={{ lookup('env', 'KILLBILL_DAO_POOLING_TYPE') }} {% endif %} @@ -255,9 +255,24 @@ org.killbill.notificationq.analytics.max.failure.retry={{ lookup('env', 'KILLBIL {% if lookup('env', 'KILLBILL_ANALYTICS_NOTIFICATION_NB_THREADS') %} org.killbill.notificationq.analytics.notification.nbThreads={{ lookup('env', 'KILLBILL_ANALYTICS_NOTIFICATION_NB_THREADS') }} {% endif %} +{% if lookup('env', 'KILLBILL_ANALYTICS_LIFECYCLE_DISPATCH_NBTHREADS') %} +org.killbill.notificationq.analytics.lifecycle.dispatch.nbThreads={{ lookup('env', 'KILLBILL_ANALYTICS_LIFECYCLE_DISPATCH_NBTHREADS') }} +{% endif %} +{% if lookup('env', 
'KILLBILL_ANALYTICS_LIFECYCLE_COMPLETE_NBTHREADS') %} +org.killbill.notificationq.analytics.lifecycle.complete.nbThreads={{ lookup('env', 'KILLBILL_ANALYTICS_LIFECYCLE_COMPLETE_NBTHREADS') }} +{% endif %} {% if lookup('env', 'KILLBILL_ANALYTICS_QUEUE_CAPACITY') %} org.killbill.notificationq.analytics.queue.capacity={{ lookup('env', 'KILLBILL_ANALYTICS_QUEUE_CAPACITY') }} {% endif %} +{% if lookup('env', 'KILLBILL_ANALYTICS_REAP_THRESHOLD') %} +org.killbill.notificationq.analytics.reapThreshold={{ lookup('env', 'KILLBILL_ANALYTICS_REAP_THRESHOLD') }} +{% endif %} +{% if lookup('env', 'KILLBILL_ANALYTICS_MAX_REDISPATCH_COUNT') %} +org.killbill.notificationq.analytics.maxReDispatchCount={{ lookup('env', 'KILLBILL_ANALYTICS_MAX_REDISPATCH_COUNT') }} +{% endif %} +{% if lookup('env', 'KILLBILL_ANALYTICS_REAP_SCHEDULE') %} +org.killbill.notificationq.analytics.reapSchedule={{ lookup('env', 'KILLBILL_ANALYTICS_REAP_SCHEDULE') }} +{% endif %} {% if lookup('env', 'KILLBILL_NOTIFICATIONQ_ANALYTICS_SLEEP') %} org.killbill.notificationq.analytics.sleep={{ lookup('env', 'KILLBILL_NOTIFICATIONQ_ANALYTICS_SLEEP') }} {% endif %} @@ -285,9 +300,24 @@ org.killbill.notificationq.main.notification.nbThreads={{ lookup('env', 'KILLBIL {% if lookup('env', 'KILLBILL_MAIN_NOTIFICATION_OFF') %} org.killbill.notificationq.main.notification.off={{ lookup('env', 'KILLBILL_MAIN_NOTIFICATION_OFF') }} {% endif %} +{% if lookup('env', 'KILLBILL_MAIN_LIFECYCLE_DISPATCH_NBTHREADS') %} +org.killbill.notificationq.main.lifecycle.dispatch.nbThreads={{ lookup('env', 'KILLBILL_MAIN_LIFECYCLE_DISPATCH_NBTHREADS') }} +{% endif %} +{% if lookup('env', 'KILLBILL_MAIN_LIFECYCLE_COMPLETE_NBTHREADS') %} +org.killbill.notificationq.main.lifecycle.complete.nbThreads={{ lookup('env', 'KILLBILL_MAIN_LIFECYCLE_COMPLETE_NBTHREADS') }} +{% endif %} {% if lookup('env', 'KILLBILL_MAIN_QUEUE_CAPACITY') %} org.killbill.notificationq.main.queue.capacity={{ lookup('env', 'KILLBILL_MAIN_QUEUE_CAPACITY') }} {% endif %} +{% if 
lookup('env', 'KILLBILL_MAIN_REAP_THRESHOLD') %} +org.killbill.notificationq.main.reapThreshold={{ lookup('env', 'KILLBILL_MAIN_REAP_THRESHOLD') }} +{% endif %} +{% if lookup('env', 'KILLBILL_MAIN_MAX_REDISPATCH_COUNT') %} +org.killbill.notificationq.main.maxReDispatchCount={{ lookup('env', 'KILLBILL_MAIN_MAX_REDISPATCH_COUNT') }} +{% endif %} +{% if lookup('env', 'KILLBILL_MAIN_REAP_SCHEDULE') %} +org.killbill.notificationq.main.reapSchedule={{ lookup('env', 'KILLBILL_MAIN_REAP_SCHEDULE') }} +{% endif %} {% if lookup('env', 'KILLBILL_MAIN_QUEUE_MODE') %} org.killbill.notificationq.main.queue.mode={{ lookup('env', 'KILLBILL_MAIN_QUEUE_MODE') }} {% endif %} @@ -378,9 +408,24 @@ org.killbill.persistent.bus.external.max.failure.retry={{ lookup('env', 'KILLBIL {% if lookup('env', 'KILLBILL_BUS_EXTERNAL_NB_THREADS') %} org.killbill.persistent.bus.external.nbThreads={{ lookup('env', 'KILLBILL_BUS_EXTERNAL_NB_THREADS') }} {% endif %} +{% if lookup('env', 'KILLBILL_BUS_EXTERNAL_LIFECYCLE_DISPATCH_NBTHREADS') %} +org.killbill.persistent.bus.external.lifecycle.dispatch.nbThreads={{ lookup('env', 'KILLBILL_BUS_EXTERNAL_LIFECYCLE_DISPATCH_NBTHREADS') }} +{% endif %} +{% if lookup('env', 'KILLBILL_BUS_EXTERNAL_LIFECYCLE_COMPLETE_NBTHREADS') %} +org.killbill.persistent.bus.external.lifecycle.complete.nbThreads={{ lookup('env', 'KILLBILL_BUS_EXTERNAL_LIFECYCLE_COMPLETE_NBTHREADS') }} +{% endif %} {% if lookup('env', 'KILLBILL_EXTERNAL_QUEUE_CAPACITY') %} org.killbill.persistent.bus.external.queue.capacity={{ lookup('env', 'KILLBILL_EXTERNAL_QUEUE_CAPACITY') }} {% endif %} +{% if lookup('env', 'KILLBILL_BUS_EXTERNAL_REAP_THRESHOLD') %} +org.killbill.persistent.bus.external.reapThreshold={{ lookup('env', 'KILLBILL_BUS_EXTERNAL_REAP_THRESHOLD') }} +{% endif %} +{% if lookup('env', 'KILLBILL_BUS_EXTERNAL_MAX_REDISPATCH_COUNT') %} +org.killbill.persistent.bus.external.maxReDispatchCount={{ lookup('env', 'KILLBILL_BUS_EXTERNAL_MAX_REDISPATCH_COUNT') }} +{% endif %} +{% if lookup('env', 
'KILLBILL_BUS_EXTERNAL_REAP_SCHEDULE') %} +org.killbill.persistent.bus.external.reapSchedule={{ lookup('env', 'KILLBILL_BUS_EXTERNAL_REAP_SCHEDULE') }} +{% endif %} {% if lookup('env', 'KILLBILL_BUS_EXTERNAL_SLEEP') %} org.killbill.persistent.bus.external.sleep={{ lookup('env', 'KILLBILL_BUS_EXTERNAL_SLEEP') }} {% endif %} @@ -411,9 +456,24 @@ org.killbill.persistent.bus.main.nbThreads={{ lookup('env', 'KILLBILL_BUS_MAIN_N {% if lookup('env', 'KILLBILL_BUS_MAIN_OFF') %} org.killbill.persistent.bus.main.off={{ lookup('env', 'KILLBILL_BUS_MAIN_OFF') }} {% endif %} +{% if lookup('env', 'KILLBILL_BUS_MAIN_LIFECYCLE_DISPATCH_NBTHREADS') %} +org.killbill.persistent.bus.main.lifecycle.dispatch.nbThreads={{ lookup('env', 'KILLBILL_BUS_MAIN_LIFECYCLE_DISPATCH_NBTHREADS') }} +{% endif %} +{% if lookup('env', 'KILLBILL_BUS_MAIN_LIFECYCLE_COMPLETE_NBTHREADS') %} +org.killbill.persistent.bus.main.lifecycle.complete.nbThreads={{ lookup('env', 'KILLBILL_BUS_MAIN_LIFECYCLE_COMPLETE_NBTHREADS') }} +{% endif %} {% if lookup('env', 'KILLBILL_MAIN_QUEUE_CAPACITY') %} org.killbill.persistent.bus.main.queue.capacity={{ lookup('env', 'KILLBILL_MAIN_QUEUE_CAPACITY') }} {% endif %} +{% if lookup('env', 'KILLBILL_BUS_MAIN_REAP_THRESHOLD') %} +org.killbill.persistent.bus.main.reapThreshold={{ lookup('env', 'KILLBILL_BUS_MAIN_REAP_THRESHOLD') }} +{% endif %} +{% if lookup('env', 'KILLBILL_BUS_MAIN_MAX_REDISPATCH_COUNT') %} +org.killbill.persistent.bus.main.maxReDispatchCount={{ lookup('env', 'KILLBILL_BUS_MAIN_MAX_REDISPATCH_COUNT') }} +{% endif %} +{% if lookup('env', 'KILLBILL_BUS_MAIN_REAP_SCHEDULE') %} +org.killbill.persistent.bus.main.reapSchedule={{ lookup('env', 'KILLBILL_BUS_MAIN_REAP_SCHEDULE') }} +{% endif %} {% if lookup('env', 'KILLBILL_MAIN_QUEUE_MODE') %} org.killbill.persistent.bus.main.queue.mode={{ lookup('env', 'KILLBILL_MAIN_QUEUE_MODE') }} {% endif %} @@ -462,3 +522,30 @@ org.killbill.template.name={{ lookup('env', 'KILLBILL_TEMPLATE_NAME') }} {% if lookup('env', 
'KILLBILL_TENANT_BROADCAST_RATE') %} org.killbill.tenant.broadcast.rate={{ lookup('env', 'KILLBILL_TENANT_BROADCAST_RATE') }} {% endif %} +{% if lookup('env', 'KILLBILL_KPM_USERNAME') %} +org.killbill.billing.plugin.kpm.adminUsername={{ lookup('env', 'KILLBILL_KPM_USERNAME') }} +{% endif %} +{% if lookup('env', 'KILLBILL_KPM_PASSWORD') %} +org.killbill.billing.plugin.kpm.adminPassword={{ lookup('env', 'KILLBILL_KPM_PASSWORD') }} +{% endif %} +{% if lookup('env', 'KILLBILL_KPM_PATH') %} +org.killbill.billing.plugin.kpm.kpmPath={{ lookup('env', 'KILLBILL_KPM_PATH') }} +{% endif %} +{% if lookup('env', 'KILLBILL_KPM_BUNDLES_PATH') %} +org.killbill.billing.plugin.kpm.bundlesPath={{ lookup('env', 'KILLBILL_KPM_BUNDLES_PATH') }} +{% endif %} +{% if lookup('env', 'KILLBILL_KPM_NEXUS_URL') %} +org.killbill.billing.plugin.kpm.nexusUrl={{ lookup('env', 'KILLBILL_KPM_NEXUS_URL') }} +{% endif %} +{% if lookup('env', 'KILLBILL_KPM_NEXUS_REPOSITORY') %} +org.killbill.billing.plugin.kpm.nexusRepository={{ lookup('env', 'KILLBILL_KPM_NEXUS_REPOSITORY') }} +{% endif %} +{% if lookup('env', 'KILLBILL_KPM_STRICT_SSL') %} +org.killbill.billing.plugin.kpm.strictSSL={{ lookup('env', 'KILLBILL_KPM_STRICT_SSL') }} +{% endif %} +{% if lookup('env', 'KILLBILL_KPM_READ_TIMEOUT_SEC') %} +org.killbill.billing.plugin.kpm.readTimeoutSec={{ lookup('env', 'KILLBILL_KPM_READ_TIMEOUT_SEC') }} +{% endif %} +{% if lookup('env', 'KILLBILL_KPM_CONNECT_TIMEOUT_SEC') %} +org.killbill.billing.plugin.kpm.connectTimeoutSec={{ lookup('env', 'KILLBILL_KPM_CONNECT_TIMEOUT_SEC') }} +{% endif %} diff --git a/ansible/templates/tomcat/conf/server.xml.j2 b/ansible/templates/tomcat/conf/server.xml.j2 index 8845133e..11544f04 100644 --- a/ansible/templates/tomcat/conf/server.xml.j2 +++ b/ansible/templates/tomcat/conf/server.xml.j2 @@ -27,11 +27,15 @@ unpackWARs="true" autoDeploy="false"> + + + pattern="%h %l %u %t "%m %U" %s %b %D %{X-Request-id}i" /> diff --git a/ansible/templates/tomcat/conf/setenv.sh.j2 
b/ansible/templates/tomcat/conf/setenv.sh.j2 index c86b895d..62b00dd9 100644 --- a/ansible/templates/tomcat/conf/setenv.sh.j2 +++ b/ansible/templates/tomcat/conf/setenv.sh.j2 @@ -7,8 +7,16 @@ CATALINA_OPTS="-server -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath={{ catalina_base }}/logs/ -XX:+UseCodeCacheFlushing +{% if lookup('env', 'TOMCAT_JAVA_XMS') %} + -Xms{{ lookup('env', 'TOMCAT_JAVA_XMS') }} +{% else %} -Xms{{ jvm_initial_memory }} +{% endif %} +{% if lookup('env', 'TOMCAT_JAVA_XMX') %} + -Xmx{{ lookup('env', 'TOMCAT_JAVA_XMX') }} +{% else %} -Xmx{{ jvm_max_memory }} +{% endif %} -Xrunjdwp:transport=dt_socket,server={{ jvm_jdwp_server }},suspend=n,address={{ jvm_jdwp_port }} -XX:+CMSClassUnloadingEnabled -XX:-OmitStackTraceInFastThrow @@ -20,10 +28,19 @@ CATALINA_OPTS="-server -XX:+CMSParallelRemarkEnabled -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction={{ jvm_cms_initiating_fraction_threshold }} +{% if lookup('env', 'TOMCAT_JAVA_NEW_SIZE') %} + -XX:NewSize={{ lookup('env', 'TOMCAT_JAVA_NEW_SIZE') }} +{% else %} -XX:NewSize={{ jvm_new_size }} +{% endif %} +{% if lookup('env', 'TOMCAT_JAVA_MAX_NEW_SIZE') %} + -XX:MaxNewSize={{ lookup('env', 'TOMCAT_JAVA_MAX_NEW_SIZE') }} +{% else %} -XX:MaxNewSize={{ jvm_max_new_size }} +{% endif %} -XX:SurvivorRatio={{ jvm_survivor_ratio }} -XX:+DisableExplicitGC +{% if not lookup('env', 'TOMCAT_DISABLE_GC_LOGGING') %} -Xloggc:{{ catalina_base }}/logs/gc.log -XX:+PrintGCApplicationConcurrentTime -XX:+PrintGCApplicationStoppedTime @@ -32,7 +49,9 @@ CATALINA_OPTS="-server -XX:+PrintTenuringDistribution -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=14 - -XX:GCLogFileSize=100M" + -XX:GCLogFileSize=100M +{% endif %} + " # Java Properties export CATALINA_OPTS="$CATALINA_OPTS diff --git a/ansible/tomcat_stop.yml b/ansible/tomcat_stop.yml new file mode 100644 index 00000000..66a02eda --- /dev/null +++ b/ansible/tomcat_stop.yml @@ -0,0 +1,7 @@ +--- +- name: Stop Tomcat + hosts: all + become: yes + tasks: + 
- name: stop Tomcat + import_tasks: roles/tomcat/tasks/stop.yml diff --git a/docker/README.adoc b/docker/README.adoc index f3f2c65e..2bfa3abb 100644 --- a/docker/README.adoc +++ b/docker/README.adoc @@ -17,6 +17,15 @@ toc::[] * `killbill/kaui:0.X.Y`: image with a specific version of Kaui installed. * `killbill/killbill:build`: official build environment for all published Kill Bill artifacts (useful for developers). +[[tips-and-tricks]] +### Tips and tricks + +The container STDOUT isn't that useful: to access logs, look at the `killbill.out` log file instead: + +``` +docker exec CONTAINER_ID tail -f logs/killbill.out +``` + [[environment-variables]] ### Environment variables @@ -28,7 +37,12 @@ For jConsole and remote debugging, you need to pass the host IP address as `ENV_ The following environment variables will populate the default `killbill.properties`. For further customization, you should bind mount `/var/lib/killbill/killbill.properties` when launching the Docker image. * `KILLBILL_ANALYTICS_NOTIFICATION_NB_THREADS` (default `5`) +* `KILLBILL_ANALYTICS_LIFECYCLE_DISPATCH_NBTHREADS` (default `1`) +* `KILLBILL_ANALYTICS_LIFECYCLE_COMPLETE_NBTHREADS` (default `2`) * `KILLBILL_ANALYTICS_QUEUE_CAPACITY` (default `30000`) +* `KILLBILL_ANALYTICS_REAP_THRESHOLD` (default `10m`) +* `KILLBILL_ANALYTICS_MAX_REDISPATCH_COUNT` (default `10`) +* `KILLBILL_ANALYTICS_REAP_SCHEDULE` (default `3m`) * `KILLBILL_BUNDLE_CACHE_NAME` (default `osgi-cache`) * `KILLBILL_BUNDLE_INSTALL_DIR` (default `/var/lib/killbill/bundles`) * `KILLBILL_BUNDLE_PROPERTY_NAME` (default `killbill.properties`) @@ -38,6 +52,11 @@ The following environment variables will populate the default `killbill.properti * `KILLBILL_BUS_EXTERNAL_SLEEP` (default `0`) * `KILLBILL_BUS_EXTERNAL_TABLE_NAME` (default `bus_ext_events`) * `KILLBILL_BUS_EXTERNAL_USE_INFLIGHT_Q` (default `true`) +* `KILLBILL_BUS_EXTERNAL_LIFECYCLE_DISPATCH_NBTHREADS` (default `1`) +* `KILLBILL_BUS_EXTERNAL_LIFECYCLE_COMPLETE_NBTHREADS` 
(default `2`) +* `KILLBILL_BUS_EXTERNAL_REAP_THRESHOLD` (default `10m`) +* `KILLBILL_BUS_EXTERNAL_MAX_REDISPATCH_COUNT` (default `10`) +* `KILLBILL_BUS_EXTERNAL_REAP_SCHEDULE` (default `3m`) * `KILLBILL_BUS_MAIN_CLAIMED` (default `10`) * `KILLBILL_BUS_MAIN_HISTORY_TABLE_NAME` (default `bus_events_history`) * `KILLBILL_BUS_MAIN_IN_MEMORY` (default `false`) @@ -45,6 +64,11 @@ The following environment variables will populate the default `killbill.properti * `KILLBILL_BUS_MAIN_OFF` (default `false`) * `KILLBILL_BUS_MAIN_SLEEP` (default `0`) * `KILLBILL_BUS_MAIN_TABLE_NAME` (default `bus_events`) +* `KILLBILL_BUS_MAIN_LIFECYCLE_DISPATCH_NBTHREADS` (default `1`) +* `KILLBILL_BUS_MAIN_LIFECYCLE_COMPLETE_NBTHREADS` (default `2`) +* `KILLBILL_BUS_MAIN_REAP_THRESHOLD` (default `10m`) +* `KILLBILL_BUS_MAIN_MAX_REDISPATCH_COUNT` (default `10`) +* `KILLBILL_BUS_MAIN_REAP_SCHEDULE` (default `3m`) * `KILLBILL_CACHE_CONFIG_LOCATION` (default `ehcache.xml`) * `KILLBILL_CATALOG_BUNDLE_PATH` (default `org/killbill/billing/util/template/translation/CatalogTranslation`) * `KILLBILL_CATALOG_URI` (default `SpyCarBasic.xml`) @@ -60,7 +84,7 @@ The following environment variables will populate the default `killbill.properti * `KILLBILL_DAO_MAX_CONNECTION_AGE` (default `0m`) * `KILLBILL_DAO_MIN_IDLE` (default `5`) * `KILLBILL_DAO_MYSQL_SERVER_VERSION` (default `4.0`) -* `KILLBILL_DAO_PASSWORD` (default `killkill`) +* `KILLBILL_DAO_PASSWORD` (default `killbill`) * `KILLBILL_DAO_POOLING_TYPE` (default `HIKARICP`) * `KILLBILL_DAO_PREP_STMT_CACHE_SIZE` (default `500`) * `KILLBILL_DAO_PREP_STMT_CACHE_SQL_LIMIT` (default `2048`) @@ -94,8 +118,12 @@ The following environment variables will populate the default `killbill.properti * `KILLBILL_MAIN_CLAIM_TIME` (default `5m`) * `KILLBILL_MAIN_NOTIFICATION_NB_THREADS` (default `10`) * `KILLBILL_MAIN_NOTIFICATION_OFF` (default `false`) +* `KILLBILL_MAIN_LIFECYCLE_DISPATCH_NBTHREADS` (default `1`) +* `KILLBILL_MAIN_LIFECYCLE_COMPLETE_NBTHREADS` (default 
`2`) * `KILLBILL_MAIN_QUEUE_CAPACITY` (default `1000000`) -* `KILLBILL_MAIN_QUEUE_CAPACITY` (default `100`) +* `KILLBILL_MAIN_REAP_THRESHOLD` (default `10m`) +* `KILLBILL_MAIN_MAX_REDISPATCH_COUNT` (default `10`) +* `KILLBILL_MAIN_REAP_SCHEDULE` (default `3m`) * `KILLBILL_MAIN_QUEUE_MODE` (default `STICKY_POLLING`) * `KILLBILL_MANUAL_PAY_TEMPLATE_NAME` (default `org/killbill/billing/util/email/templates/HtmlInvoiceTemplate.mustache`) * `KILLBILL_MAX_FAILURE_RETRY` (default `3`) @@ -177,6 +205,8 @@ The following environment variables will populate the default `killbill.properti * `KAUI_CONFIG_DAO_PASSWORD` (default `kaui`) * `KAUI_CONFIG_DEMO` (default `false`) +For PostgreSQL support, you also need to specify `KAUI_CONFIG_DAO_ADAPTER=postgresql`. + [[changes-since-0.18]] ## Changes since 0.18 @@ -260,3 +290,9 @@ docker login docker push killbill/$TARGET:$VERSION docker logout .... + +### Custom ansible playbook + +``` +docker build --no-cache --build-arg KILLBILL_CLOUD_VERSION=work-for-release-0.19.x -t killbill/base:0.19.x . +``` diff --git a/docker/compose/README.adoc b/docker/compose/README.adoc index 99175920..277cfa9b 100644 --- a/docker/compose/README.adoc +++ b/docker/compose/README.adoc @@ -9,7 +9,7 @@ toc::[] ## Quick start -https://docs.docker.com/compose/:[Docker Compose] is the easiest way to setup the Kill Bill stack: +https://docs.docker.com/compose/[Docker Compose] is the easiest way to setup the Kill Bill stack: ``` docker-compose -f docker-compose.kb.yml -p kb up @@ -33,7 +33,7 @@ Notes: * the `Makefile` will look for the Docker container IP via `docker-machine ip default`. Adapt to your setup if necessary. * `docker-compose logs` won’t work, because all logs are forwarded to http://:5601[Elasticsearch]. The easiest way to find logs for a specific container is to search for the container ID in Kibana. 
-* You need several GB of RAM to run the entire stack in the host: on Mac-OS, open Virtual Box and check the VM has 2GB or more of RAM (you need to poweroff the VM first). +* You need at least 4GB of RAM to run the entire stack properly in the host. The killbill container needs about 2.5GB RAM after start: on Mac-OS, open Virtual Box and check the VM has enough RAM (you need to poweroff the VM first). If you are using Docker for Mac, set its memory limit in Preferences (Docker for Mac Preferences > Advanced > Memory). [[logging]] ### Logging diff --git a/docker/compose/docker-compose.kb.yml b/docker/compose/docker-compose.kb.yml index 85f13a9d..53e89fc4 100644 --- a/docker/compose/docker-compose.kb.yml +++ b/docker/compose/docker-compose.kb.yml @@ -4,7 +4,7 @@ volumes: db: services: killbill: - image: killbill/killbill:0.20.0 + image: killbill/killbill:0.22.0 ports: - "8080:8080" - "8000:8000" @@ -25,7 +25,7 @@ services: - KAUI_KILLBILL_API_KEY=bob - KAUI_KILLBILL_API_SECRET=lazar db: - image: killbill/mariadb:0.20 + image: killbill/mariadb:0.22 volumes: - type: volume source: db diff --git a/docker/templates/base/latest/Dockerfile b/docker/templates/base/latest/Dockerfile index 147c5a6c..a17e2c16 100644 --- a/docker/templates/base/latest/Dockerfile +++ b/docker/templates/base/latest/Dockerfile @@ -78,7 +78,7 @@ ENV ANSIBLE_OPTS -i localhost, \ ENV NEXUS_URL https://oss.sonatype.org ENV NEXUS_REPOSITORY releases ENV KPM_INSTALL_DIR /opt -ENV KPM_VERSION 0.6.4 +ENV KPM_VERSION 0.8.1 RUN ansible-playbook $ANSIBLE_OPTS \ -e kpm_install_dir=$KPM_INSTALL_DIR \ -e nexus_url=$NEXUS_URL \ @@ -109,13 +109,6 @@ RUN ansible-playbook $ANSIBLE_OPTS \ -e gnu_arch="$(dpkg-architecture --query DEB_BUILD_GNU_TYPE)" \ $KILLBILL_CLOUD_ANSIBLE_ROLES/tomcat.yml -ENV START_TOMCAT_CMD ansible-playbook $ANSIBLE_OPTS \ - -e tomcat_owner=$TOMCAT_OWNER \ - -e tomcat_group=$TOMCAT_GROUP \ - -e tomcat_home=$TOMCAT_HOME \ - -e catalina_home=$CATALINA_HOME \ - -e catalina_base=$CATALINA_BASE \ - 
$KILLBILL_CLOUD_ANSIBLE_ROLES/tomcat_restart.yml # Start Tomcat EXPOSE 8080 -CMD ["bash", "-c", "$START_TOMCAT_CMD && tail -F $CATALINA_BASE/logs/catalina.out"] +CMD ["/usr/share/tomcat/bin/catalina.sh", "run"] diff --git a/docker/templates/build/Dockerfile b/docker/templates/build/Dockerfile index 45eab6b1..6ae66893 100644 --- a/docker/templates/build/Dockerfile +++ b/docker/templates/build/Dockerfile @@ -37,17 +37,19 @@ RUN mkdir -p /var/lib/jruby \ | tar -z -x --strip-components=1 -C /var/lib/jruby ENV PATH /var/lib/jruby/bin:$PATH -RUN jruby -S gem install bundler jbundler therubyrhino +RUN jruby -S gem install bundler --version 1.16.1 +RUN jruby -S gem install jbundler --version 0.9.3 +RUN jruby -S gem install therubyrhino --version 2.0.4 RUN ln -s /var/lib/jruby/bin/jruby /var/lib/jruby/bin/ruby ENV JRUBY_OPTS=-J-Xmx1024m # Add extra rubies -RUN /bin/bash -l -c "rvm install ruby-1.8.7-p374 && rvm use ruby-1.8.7-p374 && gem install bundler && \ - rvm install ruby-2.2.2 && rvm use ruby-2.2.2 && gem install bundler && \ - rvm install ruby-2.4.2 && rvm use ruby-2.4.2 && gem install bundler && \ - rvm install jruby-9.1.14.0 && rvm use jruby-9.1.14.0 && gem install bundler" +RUN /bin/bash -l -c "rvm install ruby-1.8.7-p374 && rvm use ruby-1.8.7-p374 && gem install bundler --version 1.16.1 && \ + rvm install ruby-2.2.2 && rvm use ruby-2.2.2 && gem install bundler --version 1.16.1 && \ + rvm install ruby-2.4.2 && rvm use ruby-2.4.2 && gem install bundler --version 1.16.1 && \ + rvm install jruby-9.1.14.0 && rvm use jruby-9.1.14.0 && gem install bundler --version 1.16.1" # Add killbill user into sudo group RUN adduser --disabled-password --gecos '' killbill && \ diff --git a/docker/templates/build/settings.xml b/docker/templates/build/settings.xml index 98630c60..ef2f93fb 100644 --- a/docker/templates/build/settings.xml +++ b/docker/templates/build/settings.xml @@ -9,7 +9,7 @@ central Central Repository - http://repo.maven.apache.org/maven2 + 
https://repo.maven.apache.org/maven2 true @@ -49,32 +49,6 @@ - - - true - - - - - false - - central - bintray - http://jcenter.bintray.com - - - - - - false - - central - bintray-plugins - http://jcenter.bintray.com - - - bintray - diff --git a/docker/templates/kaui/latest/Dockerfile b/docker/templates/kaui/latest/Dockerfile index 9b318a0f..53c02b41 100644 --- a/docker/templates/kaui/latest/Dockerfile +++ b/docker/templates/kaui/latest/Dockerfile @@ -1,27 +1,24 @@ +# Shell image that installs Kaui on startup FROM killbill/base MAINTAINER Kill Bill core team -USER root +ENV KAUI_INSTALL_DIR /var/lib/kaui -# Default local and non multi-tenant properties -ENV KAUI_KILLBILL_URL http://127.0.0.1:8080 -ENV KAUI_KILLBILL_API_KEY bob -ENV KAUI_KILLBILL_API_SECRET lazar +RUN sudo mkdir -p $KAUI_INSTALL_DIR +RUN sudo chown -R $TOMCAT_OWNER:$TOMCAT_GROUP $KAUI_INSTALL_DIR -ENV KAUI_CONFIG_DAO_URL jdbc:mysql://localhost:3306/kaui -ENV KAUI_CONFIG_DAO_USER kaui -ENV KAUI_CONFIG_DAO_PASSWORD kaui -ENV KAUI_CONFIG_DAO_ADAPTER jdbcmysql +# Default kpm.yml, override as needed +COPY ./kpm.yml $KAUI_INSTALL_DIR -ENV KAUI_CONFIG_DEMO false +COPY ./kaui.sh $KAUI_INSTALL_DIR -ENV KAUI_ROOT_USERNAME admin - -COPY ./kaui.sh /etc/init.d/kaui.sh -RUN chmod +x /etc/init.d/kaui.sh - -USER tomcat7 - -COPY ./kpm.yml.erb $KILLBILL_CONFIG - -CMD ["/etc/init.d/kaui.sh", "run"] +ENV KPM_INSTALL_CMD ansible-playbook $ANSIBLE_OPTS \ + -e kpm_install_dir=$KPM_INSTALL_DIR \ + -e kpm_version=$KPM_VERSION \ + -e kpm_yml=$KAUI_INSTALL_DIR/kpm.yml \ + -e tomcat_owner=$TOMCAT_OWNER \ + -e tomcat_group=$TOMCAT_GROUP \ + -e catalina_base=$CATALINA_BASE \ + $KILLBILL_CLOUD_ANSIBLE_ROLES/kaui.yml +# Run kpm install and start Tomcat +CMD ["/var/lib/kaui/kaui.sh"] diff --git a/docker/templates/kaui/latest/kaui.sh b/docker/templates/kaui/latest/kaui.sh index c3a14ad7..780e1cee 100755 --- a/docker/templates/kaui/latest/kaui.sh +++ b/docker/templates/kaui/latest/kaui.sh @@ -1,34 +1,5 @@ #!/bin/bash -source 
"/etc/init.d/tomcat.sh" +$KPM_INSTALL_CMD -function install { - setup_kpm_yml - - echo >&2 "Starting Kaui installation..." - jruby -S kpm install $KPM_PROPS $KILLBILL_CONFIG/kpm.yml -} - -function run { - install - - # Load JVM properties - JVM_OPTS=$(jruby -ryaml -e 'puts (YAML.load_file("#{ENV['"'KILLBILL_CONFIG'"']}/kpm.yml") || {})["kaui"]["jvm"]') - KAUI_OPTS=$(jruby -ryaml -e 'y=(YAML.load_file("#{ENV['"'KILLBILL_CONFIG'"']}/kpm.yml") || {})["kaui"]["properties"]; puts y.inject("") { |result, (k,v) | result = "#{result} -D#{k}=#{v}" }') - export CATALINA_OPTS="$JVM_OPTS $KAUI_OPTS" - - echo >&2 "Starting Kaui: CATALINA_OPTS=$CATALINA_OPTS" - cd /var/lib/tomcat7 && /usr/share/tomcat7/bin/catalina.sh run -} - -case "$1" in - run) - run - ;; - cleanup) - cleanup - ;; - *) - echo $"Usage: $0 {run|cleanup}" - exit 1 -esac +exec /usr/share/tomcat/bin/catalina.sh run diff --git a/docker/templates/kaui/latest/kpm.yml b/docker/templates/kaui/latest/kpm.yml new file mode 100644 index 00000000..fa9a3548 --- /dev/null +++ b/docker/templates/kaui/latest/kpm.yml @@ -0,0 +1,5 @@ +--- +kaui: + # Used for the sha1.yml + plugins_dir: /var/lib/kaui + webapp_path: /var/lib/tomcat/webapps/ROOT.war diff --git a/docker/templates/kaui/latest/kpm.yml.erb b/docker/templates/kaui/latest/kpm.yml.erb deleted file mode 100644 index 5ec6c1b9..00000000 --- a/docker/templates/kaui/latest/kpm.yml.erb +++ /dev/null @@ -1,21 +0,0 @@ -kaui: - group_id: <%= ENV['KAUI_GROUP_ID'] %> - artifact_id: <%= ENV['KAUI_ARTIFACT_ID'] %> - version: <%= ENV['KAUI_VERSION'] %> - jvm: "-server -XX:PermSize=<%= ENV['KILLBILL_JVM_PERM_SIZE'] %> -XX:MaxPermSize=<%= ENV['KILLBILL_JVM_MAX_PERM_SIZE'] %> -Xms<%= ENV['KILLBILL_JVM_XMS'] %> -Xmx<%= ENV['KILLBILL_JVM_XMX'] %> -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=12345 -XX:+CMSClassUnloadingEnabled -XX:-OmitStackTraceInFastThrow -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:+UseCMSInitiatingOccupancyOnly 
-XX:CMSInitiatingOccupancyFraction=<%= ENV['KILLBILL_JVM_CMS_INITIATING_OCCUPANCY_FRACTION'] %> -Xloggc:/var/log/tomcat7/gc.log -XX:+PrintGCApplicationConcurrentTime -XX:+PrintGCApplicationStoppedTime -XX:+PrintGCDateStamps -XX:+PrintGCDetails -XX:+PrintTenuringDistribution -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=14 -XX:GCLogFileSize=100M" - nexus: - ssl_verify: false - url: https://oss.sonatype.org - repository: releases - properties: - java.security.egd: file:/dev/./urandom - kaui.db.adapter: <%= ENV['KAUI_CONFIG_DAO_ADAPTER'] %> - kaui.db.url: <%= ENV['KAUI_CONFIG_DAO_URL'] %> - kaui.db.username: <%= ENV['KAUI_CONFIG_DAO_USER'] %> - kaui.db.password: <%= ENV['KAUI_CONFIG_DAO_PASSWORD'] %> - kaui.demo: <%= ENV['KAUI_CONFIG_DEMO'] %> - kaui.url: <%= ENV['KAUI_KILLBILL_URL'] %> - kaui.api_key: <%= ENV['KAUI_KILLBILL_API_KEY'] %> - kaui.api_secret: <%= ENV['KAUI_KILLBILL_API_SECRET'] %> - kaui.root_username: <%= ENV['KAUI_ROOT_USERNAME'] %> - webapp_path: "/var/lib/tomcat7/webapps/ROOT.war" diff --git a/docker/templates/kaui/tagged/Dockerfile.template b/docker/templates/kaui/tagged/Dockerfile.template index fa7cf3b4..0a67067c 100644 --- a/docker/templates/kaui/tagged/Dockerfile.template +++ b/docker/templates/kaui/tagged/Dockerfile.template @@ -4,6 +4,9 @@ MAINTAINER Kill Bill core team # VERSION will be expanded in Makefile ENV KAUI_VERSION __VERSION__ -# Install Kill Bill -RUN kpm pull_kaui_war --verify-sha1=false --destination=/var/lib/tomcat7/webapps/ROOT.war $KAUI_VERSION +# Default kpm.yml, override as needed +COPY ./kpm.yml $KAUI_INSTALL_DIR + +# Install Kaui +RUN kpm pull_kaui_war --destination=/var/lib/tomcat/webapps/ROOT.war --sha1_file=/var/lib/kaui/sha1.yml $KAUI_VERSION diff --git a/docker/templates/kaui/tagged/kpm.yml.template b/docker/templates/kaui/tagged/kpm.yml.template new file mode 100644 index 00000000..37a3e5e2 --- /dev/null +++ b/docker/templates/kaui/tagged/kpm.yml.template @@ -0,0 +1,6 @@ +--- +kaui: + version: __VERSION__ + # Used 
for the sha1.yml + plugins_dir: /var/lib/kaui + webapp_path: /var/lib/tomcat/webapps/ROOT.war diff --git a/docker/templates/killbill/latest/Dockerfile b/docker/templates/killbill/latest/Dockerfile index 35d2eafb..8096f668 100644 --- a/docker/templates/killbill/latest/Dockerfile +++ b/docker/templates/killbill/latest/Dockerfile @@ -14,6 +14,10 @@ COPY ./kpm.yml $KILLBILL_INSTALL_DIR ENV KILLBILL_BUNDLE_INSTALL_DIR $KILLBILL_INSTALL_DIR/bundles ENV KILLBILL_JRUBY_CONF_DIR $KILLBILL_INSTALL_DIR/config ENV KILLBILL_SERVER_BASE_URL http://$ENV_HOST_IP:8080 +ENV KILLBILL_KPM_PATH $KPM_INSTALL_DIR/kpm-$KPM_VERSION-linux-x86_64/kpm +ENV KILLBILL_KPM_BUNDLES_PATH $KILLBILL_BUNDLE_INSTALL_DIR + +COPY ./killbill.sh $KILLBILL_INSTALL_DIR ENV KPM_INSTALL_CMD ansible-playbook $ANSIBLE_OPTS \ -e kpm_install_dir=$KPM_INSTALL_DIR \ @@ -25,5 +29,31 @@ ENV KPM_INSTALL_CMD ansible-playbook $ANSIBLE_OPTS \ -e tomcat_group=$TOMCAT_GROUP \ -e catalina_base=$CATALINA_BASE \ $KILLBILL_CLOUD_ANSIBLE_ROLES/killbill.yml -# Run kpm install -CMD ["bash", "-c", "$KPM_INSTALL_CMD && $START_TOMCAT_CMD && touch $CATALINA_BASE/logs/killbill.out && tail -F $CATALINA_BASE/logs/killbill.out"] \ No newline at end of file + +ENV KPM_DIAGNOSTIC_CMD ansible-playbook $ANSIBLE_OPTS \ + -e kpm_install_dir=$KPM_INSTALL_DIR \ + -e kpm_version=$KPM_VERSION \ + -e kpm_yml=$KILLBILL_INSTALL_DIR/kpm.yml \ + -e kb_config_dir=$KILLBILL_INSTALL_DIR \ + -e kb_plugins_dir=$KILLBILL_INSTALL_DIR/bundles \ + -e tomcat_owner=$TOMCAT_OWNER \ + -e tomcat_group=$TOMCAT_GROUP \ + -e catalina_base=$CATALINA_BASE \ + $KILLBILL_CLOUD_ANSIBLE_ROLES/diagnostic.yml + +ENV MIGRATIONS_CMD ansible-playbook $ANSIBLE_OPTS \ + -e kpm_install_dir=$KPM_INSTALL_DIR \ + -e kpm_version=$KPM_VERSION \ + -e kpm_yml=$KILLBILL_INSTALL_DIR/kpm.yml \ + -e kb_config_dir=$KILLBILL_INSTALL_DIR \ + -e kb_plugins_dir=$KILLBILL_INSTALL_DIR/bundles \ + -e tomcat_owner=$TOMCAT_OWNER \ + -e tomcat_group=$TOMCAT_GROUP \ + -e catalina_base=$CATALINA_BASE \ + 
$KILLBILL_CLOUD_ANSIBLE_ROLES/migrations.yml + +# Install Flyway in the image +RUN ansible-playbook $ANSIBLE_OPTS $KILLBILL_CLOUD_ANSIBLE_ROLES/flyway.yml + +# Run kpm install and start Tomcat +CMD ["/var/lib/killbill/killbill.sh"] diff --git a/docker/templates/killbill/latest/killbill.sh b/docker/templates/killbill/latest/killbill.sh new file mode 100755 index 00000000..780e1cee --- /dev/null +++ b/docker/templates/killbill/latest/killbill.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +$KPM_INSTALL_CMD + +exec /usr/share/tomcat/bin/catalina.sh run diff --git a/docker/templates/killbill/latest/kpm.yml b/docker/templates/killbill/latest/kpm.yml index 64ddb63b..d69e076f 100644 --- a/docker/templates/killbill/latest/kpm.yml +++ b/docker/templates/killbill/latest/kpm.yml @@ -1,8 +1,6 @@ --- killbill: - version: 0.20.0 + version: LATEST plugins: - ruby: - - name: kpm plugins_dir: /var/lib/killbill/bundles webapp_path: /var/lib/tomcat/webapps/ROOT.war diff --git a/docker/templates/killbill/tagged/Dockerfile.template b/docker/templates/killbill/tagged/Dockerfile.template index b1839c45..2b7aa250 100644 --- a/docker/templates/killbill/tagged/Dockerfile.template +++ b/docker/templates/killbill/tagged/Dockerfile.template @@ -12,6 +12,3 @@ RUN kpm pull_kb_server_war --destination=/var/lib/tomcat/webapps/ROOT.war --bund # Install default bundles RUN kpm pull_defaultbundles --destination=/var/lib/killbill/bundles $KILLBILL_VERSION - -# Install kpm plugin by default -RUN kpm pull_ruby_plugin kpm --destination=/var/lib/killbill/bundles $KILLBILL_VERSION diff --git a/docker/templates/killbill/tagged/kpm.yml.template b/docker/templates/killbill/tagged/kpm.yml.template index 9dcc430e..78be25e6 100644 --- a/docker/templates/killbill/tagged/kpm.yml.template +++ b/docker/templates/killbill/tagged/kpm.yml.template @@ -2,7 +2,5 @@ killbill: version: __VERSION__ plugins: - ruby: - - name: kpm plugins_dir: /var/lib/killbill/bundles webapp_path: /var/lib/tomcat/webapps/ROOT.war diff --git 
a/docker/templates/mariadb/tagged/Dockerfile.template b/docker/templates/mariadb/tagged/Dockerfile.template index c0ff4f66..d7ff1f8c 100644 --- a/docker/templates/mariadb/tagged/Dockerfile.template +++ b/docker/templates/mariadb/tagged/Dockerfile.template @@ -19,9 +19,7 @@ RUN set -x \ # Install the Kaui DDL (point to latest, rarely changes) && wget https://raw.githubusercontent.com/killbill/killbill-admin-ui/master/db/ddl.sql -O - >> /docker-entrypoint-initdb.d/020_kaui.sql \ # Install the DDL of the most popular plugins (point to latest, rarely changes) - && wget https://raw.githubusercontent.com/killbill/killbill-stripe-plugin/master/db/ddl.sql -O - >> /docker-entrypoint-initdb.d/010_killbill.sql \ - && wget https://raw.githubusercontent.com/killbill/killbill-paypal-express-plugin/master/db/ddl.sql -O - >> /docker-entrypoint-initdb.d/010_killbill.sql \ - && wget https://raw.githubusercontent.com/killbill/killbill-braintree-blue-plugin/master/db/ddl.sql -O - >> /docker-entrypoint-initdb.d/010_killbill.sql \ + && wget https://raw.githubusercontent.com/killbill/killbill-stripe-plugin/master/src/main/resources/ddl.sql -O - >> /docker-entrypoint-initdb.d/010_killbill.sql \ && wget https://raw.githubusercontent.com/killbill/killbill-analytics-plugin/master/src/main/resources/org/killbill/billing/plugin/analytics/ddl.sql -O - >> /docker-entrypoint-initdb.d/010_killbill.sql \ && wget https://raw.githubusercontent.com/killbill/killbill-adyen-plugin/master/src/main/resources/ddl.sql -O - >> /docker-entrypoint-initdb.d/010_killbill.sql \ && apt-get purge -y --auto-remove ca-certificates wget diff --git a/docker/templates/postgresql/tagged/Dockerfile.template b/docker/templates/postgresql/tagged/Dockerfile.template index 3443918f..65b3f429 100644 --- a/docker/templates/postgresql/tagged/Dockerfile.template +++ b/docker/templates/postgresql/tagged/Dockerfile.template @@ -14,8 +14,8 @@ RUN printf '\connect kaui;\n\n' >> /docker-entrypoint-initdb.d/020_kaui.sql RUN set -x \ && 
apt-get update && apt-get install -y --no-install-recommends ca-certificates wget && rm -rf /var/lib/apt/lists/* \ - # Install the Kill Bill PostgreSQL DDL bridge - && wget https://raw.githubusercontent.com/killbill/killbill/killbill-$KILLBILL_VERSION.0/util/src/main/resources/org/killbill/billing/util/ddl-postgresql.sql -O - > /var/tmp/postgres-bridge.sql \ + # Install the latest Kill Bill PostgreSQL DDL bridge (see https://github.com/killbill/killbill-cloud/issues/163) + && wget https://raw.githubusercontent.com/killbill/killbill/master/util/src/main/resources/org/killbill/billing/util/ddl-postgresql.sql -O - > /var/tmp/postgres-bridge.sql \ && cat /var/tmp/postgres-bridge.sql >> /docker-entrypoint-initdb.d/010_killbill.sql \ && cat /var/tmp/postgres-bridge.sql >> /docker-entrypoint-initdb.d/020_kaui.sql \ # Install the Kill Bill DDL @@ -23,7 +23,7 @@ RUN set -x \ # Install the Kaui DDL (point to latest, rarely changes) && wget https://raw.githubusercontent.com/killbill/killbill-admin-ui/master/db/ddl.sql -O - >> /docker-entrypoint-initdb.d/020_kaui.sql \ # Install the DDL of the most popular plugins (point to latest, rarely changes) - && wget https://raw.githubusercontent.com/killbill/killbill-stripe-plugin/master/db/ddl.sql -O - >> /docker-entrypoint-initdb.d/010_killbill.sql \ + && wget https://raw.githubusercontent.com/killbill/killbill-stripe-plugin/master/src/main/resources/ddl.sql -O - >> /docker-entrypoint-initdb.d/010_killbill.sql \ && wget https://raw.githubusercontent.com/killbill/killbill-analytics-plugin/master/src/main/resources/org/killbill/billing/plugin/analytics/ddl.sql -O - >> /docker-entrypoint-initdb.d/010_killbill.sql \ && apt-get purge -y --auto-remove ca-certificates wget diff --git a/kpm/.rubocop.yml b/kpm/.rubocop.yml new file mode 100644 index 00000000..6920888b --- /dev/null +++ b/kpm/.rubocop.yml @@ -0,0 +1,73 @@ +# RuboCop 0.69.0 dropped support for 2.2 +#AllCops: +# TargetRubyVersion: 2.2 +Gemspec/RequiredRubyVersion: + Enabled: 
false + +# Alternative? +Gemspec/RubyVersionGlobalsUsage: + Enabled: false + +Layout/DefEndAlignment: + AutoCorrect: true + +Lint/HandleExceptions: + AllowComments: true + +Metrics/AbcSize: + Enabled: false + +Metrics/BlockLength: + Enabled: false + +Metrics/BlockNesting: + Enabled: false + +Metrics/ClassLength: + Enabled: false + +Metrics/CyclomaticComplexity: + Enabled: false + +Metrics/LineLength: + Enabled: false + +Metrics/MethodLength: + Enabled: false + +Metrics/ModuleLength: + Enabled: false + +Metrics/ParameterLists: + Enabled: false + +Metrics/PerceivedComplexity: + Enabled: false + +Security/YAMLLoad: + Enabled: false + +# We must support old Rubies +Style/BracesAroundHashParameters: + Enabled: false + +Style/Documentation: + Enabled: false + +Style/EmptyElse: + EnforcedStyle: empty + +# We must support old Rubies +Style/HashSyntax: + Enabled: false + +# Ruby 2.3+ only +Style/NumericPredicate: + Enabled: false + +# Ruby 2.3+ only +Style/SafeNavigation: + Enabled: false + +Style/GuardClause: + Enabled: false diff --git a/kpm/Gemfile b/kpm/Gemfile index fa75df15..7f4f5e95 100644 --- a/kpm/Gemfile +++ b/kpm/Gemfile @@ -1,3 +1,5 @@ +# frozen_string_literal: true + source 'https://rubygems.org' gemspec diff --git a/kpm/README.adoc b/kpm/README.adoc index 83c0aa2f..4a2eeb1b 100644 --- a/kpm/README.adoc +++ b/kpm/README.adoc @@ -3,65 +3,44 @@ :toclevels: 9 [[kpm-the-kill-bill-package-manager]] -# KPM: the Kill Bill Package Manager += KPM: the Kill Bill Package Manager -The goal of KPM is to facilitate the installation of Kill Bill, its plugins and Kaui. - -kpm can be used interactively to search and download individual artifacts (Kill Bill war, plugins, etc.) or to perform an automatic Kill Bill installation using a configuration file. +KPM is a command line utility which facilitates the installation of Kill Bill, its plugins and Kaui. It also provides utility helpers useful for the day-to-day management of a production system. 
toc::[] -[[installation]] -## Installation +[[kpm-installation]] +== KPM Installation -[[pre-built-binaries-recommended-linux-and-macos-only]] -### Pre-built binaries (recommended, Linux and MacOS only) +[[pre-built-binaries-recommended]] +=== Pre-built binaries (recommended) -Note that this installation method assumes `/bin/bash` to be available on your system. +KPM self-contained builds are available on http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.kill-bill.billing.installer%22%20AND%20a%3A%22kpm%22[Maven Central] with coordinates `org.kill-bill.billing.installer:kpm`. -KPM builds are available on http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.kill-bill.billing.installer%22%20AND%20a%3A%22kpm%22[Maven Central] with coordinates `org.kill-bill.billing.installer:kpm`. +Download the `.tar.gz` package matching your architecture (Linux and MacOS only). -Download the package matching your architecture. +Note that this installation method assumes `/bin/bash` to be available on your system. [[through-rubygems]] -### Through Rubygems - -Ruby is required to run KPM itself (it is not a dependency of Kill Bill). - -Ruby 2.1+ or JRuby 1.7.20+ is recommended. If you don’t have a Ruby installation yet, use https://rvm.io/rvm/install[RVM]: - -.... -gpg --keyserver hkp://keys.gnupg.net --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 -\curl -sSL https://get.rvm.io | bash -s stable --ruby -.... - -After following the post-installation instructions, you should have access to the `ruby` and `gem` executables. +=== Through Rubygems -You can then run: - -.... -gem install kpm -.... +KPM is also available on https://rubygems.org/gems/kpm[RubyGems.org]. -[[quick-start]] -## Quick start +[[kpm-commands]] +== KPM Commands -The following commands +[[kill-bill-installation]] +=== Kill Bill installation -.... -mkdir killbill -cd killbill -kpm install -.... 
+[[kpm-install]] +==== kpm install -will setup https://github.com/killbill/killbill[Kill Bill] and https://github.com/killbill/killbill-admin-ui-standalone[Kaui], i.e.: +`kpm install` (with no argument) will setup https://github.com/killbill/killbill[Kill Bill] and https://github.com/killbill/killbill-admin-ui-standalone[Kaui] in your current directory, including: -* http://tomcat.apache.org/[Tomcat] (open-source Java web server) is setup in the `killbill` directory -* The Kill Bill application (war) is installed in the `killbill/webapps` directory -* The Kill Bill UI (Kaui war) is installed in the `killbill/webapps` directory -* Default plugins are installed in the `/var/tmp/bundles` directory, among them: -* `jruby.jar`, required to run Ruby plugins -* the https://github.com/killbill/killbill-kpm-plugin[KPM plugin], required to (un-)install plugins at runtime +* http://tomcat.apache.org/[Tomcat] (open-source Java web server) +* The Kill Bill application (war) is installed in the `./webapps` directory +* The Kill Bill UI (Kaui war) is installed in the `./webapps` directory +* Default OSGI bundles are installed in the `/var/tmp/bundles` directory To start Kill Bill, simply run @@ -71,28 +50,23 @@ To start Kill Bill, simply run You can then verify Kill Bill is running by going to http://127.0.0.1:8080/kaui. -[[using-kpm]] -## Using KPM - -[[custom-installation-through-kpm.yml-file]] -### Custom Installation Through `kpm.yml` File +[[kpm-install-using-a-kpm.yml-file]] +==== kpm install Using A `kpm.yml` File -KPM allows you to specify a configuration file, `kpm.yml`, to describe what should be installed. The configuration file is a `yml`. The following shows the syntax of the `kpm.yml` file: +KPM also lets you specify a configuration file, `kpm.yml`, to describe what should be installed. For example: .... killbill: - version: 0.18.0 + version: 0.20.12 plugins: java: - name: analytics - ruby: - - name: stripe .... 
This instructs kpm to: -* Download Kill Bill version 0.18.0 -* Setup the https://github.com/killbill/killbill-analytics-plugin[Analytics] (Java) plugin and the https://github.com/killbill/killbill-stripe-plugin[Stripe] (Ruby) plugin +* Download Kill Bill version 0.20.12 +* Setup the https://github.com/killbill/killbill-analytics-plugin[Analytics] (Java) plugin (you would still need to manually install the plugin https://github.com/killbill/killbill-analytics-plugin/blob/master/src/main/resources/org/killbill/billing/plugin/analytics/ddl.sql[DDL file]) To start the installation: @@ -100,14 +74,11 @@ To start the installation: kpm install kpm.yml .... -Here is a more advanced example: +Here is a more advanced example, specifying a custom Nexus repository and non-standard deployment directories: .... killbill: - group_id: org.kill-bill.billing - artifact_id: killbill-profiles-killbill - version: 0.18.10 - default_bundles_version: 0.36.11 + version: 0.20.12 nexus: ssl_verify: false url: http://nexus.acme @@ -118,70 +89,45 @@ killbill: - name: acme:custom artifact_id: custom-plugin version: 0.0.1-SNAPSHOT - ruby: - - name: kpm plugins_dir: /var/tmp/bundles webapp_path: /var/lib/tomcat/webapps/ROOT.war .... 
-[[custom-downloads]] -### Custom Downloads +[[artifacts-downloads]] +==== Artifacts Downloads -You can also download specific versions/artifacts directly with the following commands – bypassing the kpm.yml file: +You can download specific artifacts with the following commands: -* `kpm pull_kaui_war ` -* `kpm pull_kb_server_war ` -* `kpm install_ruby_plugin plugin-key ` -* `kpm install_java_plugin plugin-key ` +* `kpm pull_kaui_war `: downloads the Kaui war +* `kpm pull_kb_server_war `: downloads the Kill Bill war +* `kpm install_java_plugin plugin-key `: downloads a Java plugin identified by `plugin-key` +* `kpm install_ruby_plugin plugin-key `: downloads a Ruby plugin identified by `plugin-key` +* `kpm pull_defaultbundles `: downloads the default Kill Bill OSGI bundles -For more details see `kpm help`. +This is especially useful when scripting deployments. -[[dev-mode]] -### Dev Mode +Note: previous plugin versions aren't cleaned up by default to facilitate rollbacks. If your deployment is stateful, unused, old plugin versions will slow the Kill Bill startup time. To cleanup these entries, use `kpm cleanup` which will delete all non-default plugin versions, or `kpm uninstall`. -If you are a developer and either modifying an existing plugin or creating a new plugin, KPM can be used to install the code of your plugin. Before going further, make sure you read the http://docs.killbill.io/latest/plugin_development.html[Plugin Development Documentation] first. +[[kpm-uninstall]] +==== kpm uninstall -Let’s assume you are modifying the code for the (Ruby) CyberSource plugin. You would have to first build the plugin package, and then you could use KPM to install the plugin. We suggest you specify a `plugin_key` with a namespace `dev:` to make it clear this is not a released version. +`kpm uninstall plugin-key` will uninstall a plugin identified by `plugin-key` (all versions are uninstalled by default). -.... 
-kpm install_ruby_plugin 'dev:cybersource' --from-source-file="/killbill-cybersource-3.3.0.tar.gz" -.... +[[kpm-migrations]] +==== kpm migrations -Let’s assume now that you are modifying the code for the (Java) Adyen plugin. The plugin first needs to be built using the `maven-bundle-plugin` to produce the OSGI jar under the `target` directory. Then, this `jar` can be installed using KPM (you would also need to specify a version here since the archive does not embed any metadata, unlike Ruby plugins packages). The same applies with regard to the `plugin_key` where we suggest to specify a namespace `dev:`. +`kpm migrations` is used to download database migration files when upgrading Kill Bill or plugins. See our http://docs.killbill.io/0.20/database_migrations.html[database migrations guide] for more details. -.... -kpm install_java_plugin 'dev:adyen' --from-source-file="/adyen-plugin-0.3.2-SNAPSHOT.jar" --version="0.3.2" -.... +[[operations]] +=== Operations -The command `kpm inspect` can be used to see what has been installed. In the case of `dev` plugins, most of the infofrmation related to `GROUP ID`, `ARTIFACT ID`, `PACKAGING` and `SHA1` will be missing because no real download occured. +[[kpm-inspect]] +==== kpm inspect -Finally, when it is time to use a released version of a plugin, we first recommend to uninstall the `dev` version, by using the `kpm uninstall` command and using the `plugin_key`, and then installing the released version. For instance the following sequence could happen: +The command `kpm inspect` can be used to see which plugins have been installed: ....
-> kpm inspect -___________________________________________________________________________________________________________________________ -| PLUGIN NAME | PLUGIN KEY | TYPE | GROUP ID | ARTIFACT ID | PACKAGING | VERSIONS sha1=[], def=(*), del=(x) | -___________________________________________________________________________________________________________________________ -| killbill-cybersource | dev:cybersource | ruby | ??? | ??? | ??? | 3.3.0[???](*) | -| adyen | dev:adyen | java | ??? | ??? | ??? | 0.3.2[???](*) | -___________________________________________________________________________________________________________________________ - -> kpm uninstall 'dev:cybersource' -Removing the following versions of the killbill-cybersource plugin: 3.3.0 -Done! - -> kpm inspect - -_____________________________________________________________________________________________________________ -| PLUGIN NAME | PLUGIN KEY | TYPE | GROUP ID | ARTIFACT ID | PACKAGING | VERSIONS sha1=[], def=(*), del=(x) | -_____________________________________________________________________________________________________________ -| adyen | dev:adyen | java | ??? | ??? | ??? | 0.3.2[???](*) | -_____________________________________________________________________________________________________________ - -> kpm install_ruby_plugin cybersource -[...] 
- -> kpm inspect +kpm inspect --destination=/var/tmp/bundles _______________________________________________________________________________________________________________________________________________________ | PLUGIN NAME | PLUGIN KEY | TYPE | GROUP ID | ARTIFACT ID | PACKAGING | VERSIONS sha1=[], def=(*), del=(x) | _______________________________________________________________________________________________________________________________________________________ @@ -190,6 +136,56 @@ ________________________________________________________________________________ _______________________________________________________________________________________________________________________________________________________ .... +Note: `GROUP ID`, `ARTIFACT ID`, `PACKAGING` and `SHA1` can be missing (`???`) when installing plugins which aren't hosted in a Nexus repository. This isn't an issue. + +[[kpm-system]] +==== kpm system + +The command `kpm system` is a superset of the `inspect` command. In addition to plugins information, the command will return details about the Kill Bill and Kaui installation, Java and Ruby environment, details about the OS, CPU, Memory and disks, entropy available, etc. + +Note: for non-standard deployments, you will need to tell KPM the location of the Kill Bill and Kaui webapp (see `kpm help system`). + +[[kpm-diagnostic]] +==== kpm diagnostic + +The command `kpm diagnostic` is a superset of the `system` command. It will connect to your Kill Bill instance to gather tenant configuration information and account data (if a specific account is specified) and will gather all log files. + +You will need to instruct KPM how to connect to your Kill Bill instance (see `kpm help diagnostic`). + +[[kpm-account]] +==== kpm account + +`kpm account` exports all account data from a running system and re-imports it in another Kill Bill installation. This is an advanced command and is usually run as part of `kpm agnostic`. 
+ +[[kpm-tenant_config]] +==== kpm tenant_config + +`kpm tenant_config` exports tenant specific data from a running system. This is an advanced command and is usually run as part of `kpm diagnostic`. + +[[kpm-cleanup]] +==== kpm cleanup + +`kpm cleanup` will delete all non-default plugin versions. The `--dry-run` option can be used to double check first what would be deleted. + +[[plugins-development]] +=== Plugins development + +[[kpm-info]] +==== kpm info + +`kpm info` lists the libraries to use when writing a plugin for a specific Kill Bill version. It also lists all of the official plugins for that specific version. + +[[installing-custom-plugins]] +==== Installing Custom Plugins + +If you are a developer and either modifying an existing plugin or creating a new plugin, KPM can be used to install the code of your plugin. Before going further, make sure you read the http://docs.killbill.io/latest/plugin_development.html[Plugin Development Documentation] first. + +Let’s assume now that you are modifying the code for the (Java) Adyen plugin. The plugin first needs to be built using the `maven-bundle-plugin` to produce the OSGI jar under the `target` directory. Then, this `jar` can be installed using KPM. We suggest you specify a `plugin_key` with a namespace `dev:` to make it clear this is not a released version: + +.... +kpm install_java_plugin 'dev:adyen' --from-source-file="/adyen-plugin-0.3.2-SNAPSHOT.jar" --version="0.3.2" +.... + [[internals]] ## Internals @@ -225,14 +221,17 @@ Test suite that requires an instance of `mysql` running and verifies the followi [[plugin-keys]] ### Plugin Keys -In the `kpm.yml` example provided above, the plugins are named using their `pluginKey` (the value for the `name` in the `kpm.yml`) .
The `pluginKey` is the identifier for the plugin: * For plugins maintained by the Kill Bill team, this identifier matches the key in the https://github.com/killbill/killbill-cloud/blob/master/kpm/lib/kpm/plugins_directory.yml[file based repository] of well-known plugins * For other plugins, this key is either specified when installing the plugin through api call, or default to the `pluginName`. For more information, please refer to the Plugin Developer Guide. +Plugins are named using their `pluginKey` (the value for the `name` entry in the `kpm.yml`). The `pluginKey` is the identifier for the plugin: + +* For plugins maintained by the Kill Bill team, this identifier matches the key in the https://github.com/killbill/killbill-cloud/blob/master/kpm/lib/kpm/plugins_directory.yml[file based repository] of well-known plugins +* For other plugins, this key is either specified when installing the plugin through api call, or defaults to the `pluginName`. For more information, please refer to the http://docs.killbill.io/latest/plugin_development.html[Plugin Development guide]. [[caching]] ### Caching KPM relies on the `kpm.yml` file to know what to install, and as it installs the pieces, it keeps track of what was installed so that if it is invoked again, it does not download again the same binaries. The generic logic associated with that file is the following: -1. When installing a binary (`war`, `jar`, `tar.gz`..), KPM will download both the binary and the `sha1` from the server, compute the `sha1` for the binary and compare the two (verify that binary indeed matches its remote `sha1`). Then, binary is installed and `sha1.yml` file is updated. The `sha1` entry in that `sha1.yml` file will now represent the local `sha1` version (note that for `tar.gz` binaries which have been uncompressed, the local `sha1` is not anymore easily recomputable).
When installing a binary (`war`, `jar`, `tar.gz`, ...), KPM will download both the binary and the `sha1` from the server, compute the `sha1` for the binary and compare the two (verify that binary indeed matches its remote `sha1`). Then, the binary is installed and `sha1.yml` file is updated. The `sha1` entry in that `sha1.yml` file will now represent the local `sha1` version (note that for `tar.gz` binaries which have been uncompressed, the local `sha1` is not anymore easily recomputable). 2. When attempting to download again the same binary, KPM will compare the value in the `sha1.yml` and the one on the remote server and if those match, it will not download the binary again. There are some non standard scenario that could occur in case of users tampering with the data (or remove server unavailable): @@ -242,4 +241,7 @@ There are some non standard scenario that could occur in case of users tampering * `sha1` entry in the `sha1.yml` exists but has the special value `SKIP` : Binary will _not_ be downloaded again * Binary does not exist on the file system (or has been replaced with something else): KPM will ignore. Note that correct way to remove plugins is to use the `KPM uninstall` command. -Note that you can override that behavior with the `--force-download` switch. +Notes: + +* You can override that behavior with the `--force-download` switch +* When `--force-download` is specified (`false` by default), network access to a Nexus instance is required. 
Otherwise, downloads are idempotent even if no outbound networking is allowed (on initial download, the Nexus metadata is cached in the `sha1.yml` file which is re-used on subsequent installation if no outbound networking is allowed -- by default, KPM will try to get the latest metadata from Nexus though) diff --git a/kpm/Rakefile b/kpm/Rakefile old mode 100644 new mode 100755 index 6b5a0658..b628f1da --- a/kpm/Rakefile +++ b/kpm/Rakefile @@ -1,4 +1,5 @@ #!/usr/bin/env rake +# frozen_string_literal: true Dir.glob('tasks/*.rake').each { |r| import r } @@ -33,4 +34,4 @@ namespace :test do end # Run tests by default -task :default => 'test:spec' +task default: 'test:spec' diff --git a/kpm/bin/kpm b/kpm/bin/kpm index 776bc4b6..7354eab1 100755 --- a/kpm/bin/kpm +++ b/kpm/bin/kpm @@ -1,10 +1,12 @@ #!/usr/bin/env ruby -$:.push File.expand_path("../../lib", __FILE__) +# frozen_string_literal: true + +$LOAD_PATH.push File.expand_path('../lib', __dir__) require 'kpm' begin KPM::Cli.start -rescue => e +rescue StandardError => e KPM.ui.say "#{e.message}\n#{e.backtrace.join("\n")}", :red exit 1 end diff --git a/kpm/kpm.gemspec b/kpm/kpm.gemspec index de55bd56..8a6624e3 100644 --- a/kpm/kpm.gemspec +++ b/kpm/kpm.gemspec @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # # Copyright 2014 The Billing Project, LLC # @@ -14,7 +16,7 @@ # under the License. # -$LOAD_PATH.unshift File.expand_path('../lib', __FILE__) +$LOAD_PATH.unshift File.expand_path('lib', __dir__) require 'kpm/version' Gem::Specification.new do |s| @@ -34,17 +36,17 @@ Gem::Specification.new do |s| s.files = `git ls-files`.split("\n") s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n") s.bindir = 'bin' - s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) } - s.require_paths = ["lib"] + s.executables = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) } + s.require_paths = ['lib'] s.rdoc_options << '--exclude' << '.' 
s.add_dependency 'highline', '~> 1.6.21' - s.add_dependency 'thor', '~> 0.19.1' + s.add_dependency 'killbill-client', '~> 3.2' s.add_dependency 'rubyzip', '~>1.2.0' - s.add_dependency 'killbill-client', '~> 1.0' + s.add_dependency 'thor', '~> 0.19.1' s.add_development_dependency 'rake', '>= 10.0.0', '< 11.0.0' s.add_development_dependency 'rspec', '~> 2.12.0' + s.add_development_dependency 'rubocop', '~> 0.74.0' if RUBY_VERSION >= '2.3' end - diff --git a/kpm/lib/kpm.rb b/kpm/lib/kpm.rb index c86cfce2..5fc0a84c 100644 --- a/kpm/lib/kpm.rb +++ b/kpm/lib/kpm.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module KPM autoload :Utils, 'kpm/utils' autoload :BaseArtifact, 'kpm/base_artifact' @@ -19,6 +21,7 @@ module KPM autoload :Migrations, 'kpm/migrations' autoload :System, 'kpm/system' autoload :Account, 'kpm/account' + autoload :Blob, 'kpm/blob' autoload :Database, 'kpm/database' autoload :TenantConfig, 'kpm/tenant_config' autoload :DiagnosticFile, 'kpm/diagnostic_file' diff --git a/kpm/lib/kpm/account.rb b/kpm/lib/kpm/account.rb index 6ef99945..22ae58ef 100644 --- a/kpm/lib/kpm/account.rb +++ b/kpm/lib/kpm/account.rb @@ -1,17 +1,18 @@ +# frozen_string_literal: true + require 'net/http' require 'tmpdir' require 'yaml' require 'date' require 'securerandom' +require 'base64' require 'killbill_client' module KPM - class Account - # Killbill server KILLBILL_HOST = ENV['KILLBILL_HOST'] || '127.0.0.1' - KILLBILL_URL = 'http://'.concat(KILLBILL_HOST).concat(':8080') + KILLBILL_URL = "http://#{KILLBILL_HOST}:8080" KILLBILL_API_VERSION = '1.0' # USER/PWD @@ -24,7 +25,7 @@ class Account # Temporary directory TMP_DIR_PEFIX = 'killbill' - TMP_DIR = Dir.mktmpdir(TMP_DIR_PEFIX); + TMP_DIR = Dir.mktmpdir(TMP_DIR_PEFIX) # Created By WHO = 'kpm_export_import' @@ -34,93 +35,86 @@ class Account PLUGIN_NAME_COLUMN = 'plugin_name' # fields to remove from the export files - REMOVE_DATA_FROM = {:accounts => [:name, :address1, :address2, :city, :state_or_province, :phone, :email], - 
:account_history => [:name, :address1, :address2, :city, :state_or_province, :phone, :email]} + REMOVE_DATA_FROM = { accounts: %i[name address1 address2 city state_or_province phone email], + account_history: %i[name address1 address2 city state_or_province phone email] }.freeze - DATE_COLUMNS_TO_FIX = ['created_date','updated_date','processing_available_date','effective_date', - 'boot_date','start_timestamp','last_access_time','payment_date','original_created_date', - 'last_sys_update_date','charged_through_date','bundle_start_date','start_date'] + DATE_COLUMNS_TO_FIX = %w[created_date updated_date processing_available_date effective_date + boot_date start_timestamp last_access_time payment_date original_created_date + last_sys_update_date charged_through_date bundle_start_date start_date catalog_effective_date reference_time].freeze # round trip constants duplicate record - ROUND_TRIP_EXPORT_IMPORT_MAP = {:accounts => {:id => :accounts_id, :external_key => :accounts_id}, :all => {:account_id => :accounts_id}, - :account_history => {:id => :account_history_id, :external_key => :accounts_id, :payment_method_id => :payment_methods_id}, - :account_emails => {:id => :account_emails_id}, :account_email_history => {:id => :account_email_history_id}, - :subscription_events => {:id => :subscription_events_id},:subscriptions => {:id => :subscriptions_id}, - :bundles => {:id => :bundles_id},:blocking_states => {:id => :blocking_states_id, :blockable_id => nil}, - :invoice_items => {:id => :invoice_items_id, :child_account_id => nil, :invoice_id => :invoices_id, :bundle_id => :bundles_id, :subscription_id => :subscriptions_id }, - :invoices => {:id => :invoices_id}, - :invoice_payments => {:id => :invoice_payments_id, :invoice_id => :invoices_id, :payment_id => :payments_id}, - :invoice_parent_children => {:id => :invoice_parent_children_id, :parent_invoice_id => nil, :child_invoice_id => nil, :child_account_id => nil}, - :payment_attempts => {:id => :payment_attempts_id, 
:payment_method_id => :payment_methods_id, :transaction_id => :payment_transactions_id}, - :payment_attempt_history => {:id => :payment_attempt_history_id, :payment_method_id => :payment_methods_id, :transaction_id => :payment_transactions_id}, - :payment_methods => {:id => :payment_methods_id, :external_key => :generate},:payment_method_history => {:id => :payment_method_history_id}, - :payments => {:id => :payments_id, :payment_method_id => :payment_methods_id}, - :payment_history => {:id => :payment_history_id, :payment_method_id => :payment_methods_id}, - :payment_transactions => {:id => :payment_transactions_id, :payment_id => :payments_id}, - :payment_transaction_history => {:id => :payment_transaction_history_id, :payment_id => :payments_id}, - :_invoice_payment_control_plugin_auto_pay_off => {:payment_method_id => :payment_methods_id, :payment_id => :payments_id}, - :rolled_up_usage => {:id => :rolled_up_usage_id, :subscription_id => :subscriptions_id, :tracking_id => nil}, - :custom_fields => {:id => :custom_fields_id},:custom_field_history => {:id => :custom_field_history_id}, - :tag_definitions => {:id => :tag_definitions_id},:tag_definition_history => {:id => :tag_definition_history_id}, - :tags => {:id => :tags_id, :object_id => nil}, - :tag_history => {:id => :tag_history_id, :object_id => nil}, - :audit_log => {:id => :audit_log_id} - } - - #delimeters to sniff - DELIMITERS = [',','|'] - DEFAULT_DELIMITER = "|" + ROUND_TRIP_EXPORT_IMPORT_MAP = { accounts: { id: :accounts_id, external_key: :accounts_id }, all: { account_id: :accounts_id }, + account_history: { id: :account_history_id, external_key: :accounts_id, payment_method_id: :payment_methods_id }, + account_emails: { id: :account_emails_id }, account_email_history: { id: :account_email_history_id }, + subscription_events: { id: :subscription_events_id }, subscriptions: { id: :subscriptions_id }, + bundles: { id: :bundles_id }, blocking_states: { id: :blocking_states_id, blockable_id: nil }, + 
invoice_items: { id: :invoice_items_id, child_account_id: nil, invoice_id: :invoices_id, bundle_id: :bundles_id, subscription_id: :subscriptions_id }, + invoices: { id: :invoices_id }, + invoice_payments: { id: :invoice_payments_id, invoice_id: :invoices_id, payment_id: :payments_id }, + invoice_parent_children: { id: :invoice_parent_children_id, parent_invoice_id: nil, child_invoice_id: nil, child_account_id: nil }, + payment_attempts: { id: :payment_attempts_id, payment_method_id: :payment_methods_id, transaction_id: :payment_transactions_id }, + payment_attempt_history: { id: :payment_attempt_history_id, payment_method_id: :payment_methods_id, transaction_id: :payment_transactions_id }, + payment_methods: { id: :payment_methods_id, external_key: :generate }, payment_method_history: { id: :payment_method_history_id }, + payments: { id: :payments_id, payment_method_id: :payment_methods_id }, + payment_history: { id: :payment_history_id, payment_method_id: :payment_methods_id }, + payment_transactions: { id: :payment_transactions_id, payment_id: :payments_id }, + payment_transaction_history: { id: :payment_transaction_history_id, payment_id: :payments_id }, + _invoice_payment_control_plugin_auto_pay_off: { payment_method_id: :payment_methods_id, payment_id: :payments_id }, + rolled_up_usage: { id: :rolled_up_usage_id, subscription_id: :subscriptions_id, tracking_id: nil }, + custom_fields: { id: :custom_fields_id }, custom_field_history: { id: :custom_field_history_id }, + tag_definitions: { id: :tag_definitions_id }, tag_definition_history: { id: :tag_definition_history_id }, + tags: { id: :tags_id, object_id: nil }, + tag_history: { id: :tag_history_id, object_id: nil }, + audit_log: { id: :audit_log_id } }.freeze + + # delimeters to sniff + DELIMITERS = [',', '|'].freeze + DEFAULT_DELIMITER = '|' + + B64_REGEX = %r{^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)?$}.freeze def initialize(config_file = nil, killbill_api_credentials = nil, 
killbill_credentials = nil, killbill_url = nil, database_name = nil, database_credentials = nil, database_host = nil, database_port = nil, data_delimiter = nil, logger = nil) @killbill_api_key = KILLBILL_API_KEY - @killbill_api_secrets = KILLBILL_API_SECRET + @killbill_api_secret = KILLBILL_API_SECRET @killbill_url = KILLBILL_URL @killbill_user = KILLBILL_USER @killbill_password = KILLBILL_PASSWORD @delimiter = data_delimiter || DEFAULT_DELIMITER @logger = logger - @tables_id = Hash.new + @tables_id = {} + set_killbill_options(killbill_api_credentials, killbill_credentials, killbill_url) - set_killbill_options(killbill_api_credentials,killbill_credentials,killbill_url) - set_database_options(database_host,database_port,database_name,database_credentials,logger) + database_credentials ||= [nil, nil] + @database = Database.new(database_name, database_host, database_port, database_credentials[0], database_credentials[1], logger) load_config_from_file(config_file) - end def export_data(account_id = nil) - - if account_id === :export.to_s - raise Interrupt, 'Need to specify an account id' - end + raise Interrupt, 'Need to specify an account id' if account_id == :export.to_s export_data = fetch_export_data(account_id) export_file = export(export_data) - unless File.exist?(export_file) - raise Interrupt, 'Account id not found' - else - @logger.info "\e[32mData exported under #{export_file}\e[0m" - end + raise Interrupt, 'Account id not found' unless File.exist?(export_file) + + @logger.info "\e[32mData exported under #{export_file}\e[0m" export_file end def import_data(source_file, tenant_record_id, skip_payment_methods, round_trip_export_import = false, generate_record_id = false) + source_file = File.expand_path(source_file) @generate_record_id = generate_record_id @tenant_record_id = tenant_record_id @round_trip_export_import = round_trip_export_import - if source_file === :import.to_s - raise Interrupt, 'Need to specify a file' - end + raise Interrupt, 'Need to 
specify a file' if source_file == :import.to_s - unless File.exist?(source_file) - raise Interrupt, 'Need to specify a valid file' - end + raise Interrupt, "File #{source_file} does not exist" unless File.exist?(source_file) @delimiter = sniff_delimiter(source_file) || @delimiter @@ -129,393 +123,330 @@ def import_data(source_file, tenant_record_id, skip_payment_methods, round_trip_ private - # export helpers: fetch_export_data; export; process_export_data; remove_export_data; - def fetch_export_data(account_id) - KillBillClient.url = @killbill_url - options = { - :username => @killbill_user, - :password => @killbill_password, - :api_key => @killbill_api_key, - :api_secret => @killbill_api_secrets - } - - begin - account_data = KillBillClient::Model::Export.find_by_account_id(account_id, 'KPM', options) - rescue Exception => e - raise Interrupt, 'Account id not found' - end - - account_data + # export helpers: fetch_export_data; export; process_export_data; remove_export_data; + def fetch_export_data(account_id) + KillBillClient.url = @killbill_url + options = { + username: @killbill_user, + password: @killbill_password, + api_key: @killbill_api_key, + api_secret: @killbill_api_secret + } + + begin + account_data = KillBillClient::Model::Export.find_by_account_id(account_id, 'KPM', options) + rescue StandardError + raise Interrupt, 'Account id not found' end - def export(export_data) - export_file = TMP_DIR + File::SEPARATOR + 'kbdump' - - open (export_file), 'w' do |io| - - table_name = nil - cols_names = nil - export_data.split("\n").each do |line| - words = line.strip.split(" ") - clean_line = line - if not /--/.match(words[0]).nil? - table_name = words[1] - cols_names = words[2].strip.split(@delimiter) - elsif not table_name.nil? 
- clean_line = process_export_data(line,table_name,cols_names) - end - io.puts clean_line + account_data + end + def export(export_data) + export_file = TMP_DIR + File::SEPARATOR + 'kbdump' + + File.open(export_file, 'w') do |io| + table_name = nil + cols_names = nil + export_data.split("\n").each do |line| + words = line.strip.split(' ') + clean_line = line + if !/--/.match(words[0]).nil? + table_name = words[1] + cols_names = words[2].strip.split(@delimiter) + elsif !table_name.nil? + clean_line = process_export_data(line, table_name, cols_names) end - + io.puts clean_line end - - export_file end - def process_export_data(line_to_process, table_name, cols_names) - clean_line = line_to_process - - row = [] - cols = clean_line.strip.split(@delimiter) - cols_names.each_with_index { |col_name, index| - sanitized_value = remove_export_data(table_name,col_name,cols[index]) - - row << sanitized_value + export_file + end - } + def process_export_data(line_to_process, table_name, cols_names) + clean_line = line_to_process - clean_line = row.join(@delimiter) + row = [] + cols = clean_line.strip.split(@delimiter) + cols_names.each_with_index do |col_name, index| + sanitized_value = remove_export_data(table_name, col_name, cols[index]) - clean_line + row << sanitized_value end - def remove_export_data(table_name,col_name,value) - - if not REMOVE_DATA_FROM[table_name.to_sym].nil? + row.join(@delimiter) + end - if REMOVE_DATA_FROM[table_name.to_sym].include? col_name.to_sym - return nil - end + def remove_export_data(table_name, col_name, value) + unless REMOVE_DATA_FROM[table_name.to_sym].nil? - end + return nil if REMOVE_DATA_FROM[table_name.to_sym].include? 
col_name.to_sym - value end - # import helpers: sanitize_and_import; import; sanitize; replace_tenant_record_id; replace_account_record_id; replace_boolean; - # fix_dates; fill_empty_column; - def sanitize_and_import(source_file, skip_payment_methods) - tables = Hash.new - error_importing_data = false - - open (source_file), 'r' do |data| - - rows = nil; - table_name = nil; - cols_names = nil; - - data.each_line do |line| - words = line.strip.split(" ") - - if /--/.match(words[0]) - unless table_name.nil? - if @generate_record_id - cols_names.shift - end + value + end - tables[table_name] = { :col_names => cols_names, :rows => rows}; - end + # import helpers: sanitize_and_import; import; sanitize; replace_tenant_record_id; replace_account_record_id; replace_boolean; + # fix_dates; fill_empty_column; + def sanitize_and_import(source_file, skip_payment_methods) + tables = {} + error_importing_data = false - table_name = words[1] - cols_names = words[2].strip.split(@delimiter) + File.open(source_file, 'r:UTF-8') do |data| + rows = nil + table_name = nil + cols_names = nil - rows = [] - elsif not table_name.nil? - row = process_import_data(line, table_name,cols_names, skip_payment_methods, rows) + data.each_line do |line| + words = line.strip.split(' ') - next if row.nil? + if /--/.match(words[0]) + unless table_name.nil? + cols_names.shift if @generate_record_id - rows.push(row) - else - error_importing_data = true - break + tables[table_name] = { col_names: cols_names, rows: rows } end - end - if not ( table_name.nil? || error_importing_data ) - if @generate_record_id - cols_names.shift - end + table_name = words[1] + cols_names = words[2].strip.split(@delimiter) - tables[table_name] = { :col_names => cols_names, :rows => rows}; - end + rows = [] + elsif !table_name.nil? + row = process_import_data(line, table_name, cols_names, skip_payment_methods, rows) - if tables.empty? + next if row.nil? 
+ + rows.push(row) + else error_importing_data = true + break end end - unless error_importing_data - import(tables) - else - raise Interrupt, "Data on #{source_file} is invalid" + unless table_name.nil? || error_importing_data + cols_names.shift if @generate_record_id + + tables[table_name] = { col_names: cols_names, rows: rows } end + error_importing_data = true if tables.empty? end - def process_import_data(line, table_name, cols_names, skip_payment_methods, rows) - # to make sure that the last column is not omitted if is empty - cols = line.strip.split(@delimiter,line.count(@delimiter)+1) - - if cols_names.size != cols.size - @logger.warn "\e[32mWARNING!!! On #{table_name} table there is a mismatch on column count[#{cols.size}] versus header count[#{cols_names.size}]\e[0m" - return nil - end + raise Interrupt, "Data on #{source_file} is invalid" if error_importing_data - row = [] + import(tables) + end - cols_names.each_with_index do |col_name, index| - sanitized_value = sanitize(table_name,col_name,cols[index], skip_payment_methods) + def process_import_data(line, table_name, cols_names, skip_payment_methods, _rows) + # to make sure that the last column is not omitted if is empty + cols = line.strip.split(@delimiter, line.count(@delimiter) + 1) - unless sanitized_value.nil? - row << sanitized_value - end - end - - return row + if cols_names.size != cols.size + @logger.warn "\e[32mWARNING!!! 
On #{table_name} table there is a mismatch on column count[#{cols.size}] versus header count[#{cols_names.size}]\e[0m" + return nil end - def import(tables) - record_id = nil; - statements = Database.generate_insert_statement(tables) - statements.each do |statement| - response = Database.execute_insert_statement(statement[:table_name],statement[:query], statement[:qty_to_insert], statement[:table_data],record_id) + row = [] - if statement[:table_name] == 'accounts' && response.is_a?(String) - record_id = {:variable => '@account_record_id', :value => response} - end - - if response === false - break - end - end + @logger.debug "Processing table_name=#{table_name}, line=#{line}" + cols_names.each_with_index do |col_name, index| + sanitized_value = sanitize(table_name, col_name, cols[index], skip_payment_methods) + row << sanitized_value unless sanitized_value.nil? end - def sanitize(table_name,column_name,value,skip_payment_methods) - sanitized_value = replace_boolean(value) - sanitized_value = fill_empty_column(sanitized_value) + row + end - if table_name == 'payment_methods' && skip_payment_methods && column_name == PLUGIN_NAME_COLUMN - sanitized_value = SAFE_PAYMENT_METHOD - end + def import(tables) + record_id = nil + statements = @database.generate_insert_statement(tables) + statements.each do |statement| + response = @database.execute_insert_statement(statement[:table_name], statement[:query], statement[:qty_to_insert], statement[:table_data], record_id) - if DATE_COLUMNS_TO_FIX.include? column_name - sanitized_value = fix_dates(sanitized_value) - end + record_id = { variable: '@account_record_id', value: response } if statement[:table_name] == 'accounts' && response.is_a?(String) - if not @tenant_record_id.nil? 
- sanitized_value = replace_tenant_record_id(table_name,column_name,sanitized_value) - end + break unless response + end + end - if @generate_record_id - sanitized_value = replace_account_record_id(table_name,column_name,sanitized_value) - end + def sanitize(table_name, column_name, value, skip_payment_methods) + sanitized_value = replace_boolean(value) - if @round_trip_export_import - sanitized_value = replace_uuid(table_name,column_name,sanitized_value) - end + sanitized_value = fill_empty_column(sanitized_value) - sanitized_value - end + sanitized_value = SAFE_PAYMENT_METHOD if table_name == 'payment_methods' && skip_payment_methods && column_name == PLUGIN_NAME_COLUMN - def replace_tenant_record_id(table_name,column_name,value) - return @tenant_record_id if column_name == 'tenant_record_id' || column_name == 'search_key2' - value - end + sanitized_value = fix_dates(sanitized_value) if DATE_COLUMNS_TO_FIX.include? column_name - def replace_account_record_id(table_name,column_name,value) + sanitized_value = replace_tenant_record_id(table_name, column_name, sanitized_value) unless @tenant_record_id.nil? 
- if column_name == 'account_record_id' + sanitized_value = replace_account_record_id(table_name, column_name, sanitized_value) if @generate_record_id - return :@account_record_id - end + sanitized_value = replace_uuid(table_name, column_name, sanitized_value) if @round_trip_export_import - if column_name == 'record_id' - return nil - end + sanitized_value = b64_decode_if_needed(sanitized_value) if column_name == 'billing_events' - if column_name == 'target_record_id' + sanitized_value + end - if table_name == 'account_history' - return :@account_record_id - end - end + def replace_tenant_record_id(_table_name, column_name, value) + return @tenant_record_id if %w[tenant_record_id search_key2].include?(column_name) - if column_name == 'search_key1' && table_name == 'bus_ext_events_history' - return :@account_record_id - end + value + end - if column_name == 'search_key1' && table_name == 'bus_events_history' - return :@account_record_id - end + def replace_account_record_id(table_name, column_name, value) + return :@account_record_id if column_name == 'account_record_id' - value + return nil if column_name == 'record_id' - end + if column_name == 'target_record_id' - def replace_boolean(value) - if value.to_s === 'true' - return 1 - elsif value.to_s === 'false' - return 0 - else - return value - end + return :@account_record_id if table_name == 'account_history' end - def fix_dates(value) - if !value.equal?(:DEFAULT) + return :@account_record_id if column_name == 'search_key1' && table_name == 'bus_ext_events_history' - dt = DateTime.parse(value) - return dt.strftime('%F %T').to_s + return :@account_record_id if column_name == 'search_key1' && table_name == 'bus_events_history' - end + value + end + def replace_boolean(value) + if value.to_s == 'true' + 1 + elsif value.to_s == 'false' + 0 + else value end + end - def fill_empty_column(value) - if value.to_s.strip.empty? 
- return :DEFAULT - else - return value - end - end - - def replace_uuid(table_name,column_name,value) - - if column_name == 'id' - @tables_id["#{table_name}_id"] = SecureRandom.uuid - end - - if ROUND_TRIP_EXPORT_IMPORT_MAP[table_name.to_sym] && ROUND_TRIP_EXPORT_IMPORT_MAP[table_name.to_sym][column_name.to_sym] - key = ROUND_TRIP_EXPORT_IMPORT_MAP[table_name.to_sym][column_name.to_sym] + def fix_dates(value) + unless value.equal?(:DEFAULT) - if key.equal?(:generate) - new_value = SecureRandom.uuid - else - new_value = @tables_id[key.to_s] - end - - if new_value.nil? - new_value = SecureRandom.uuid - @tables_id[key.to_s] = new_value - end - return new_value - end + dt = DateTime.parse(value) + return dt.strftime('%F %T').to_s - if not ROUND_TRIP_EXPORT_IMPORT_MAP[:all][column_name.to_sym].nil? - key = ROUND_TRIP_EXPORT_IMPORT_MAP[:all][column_name.to_sym] - new_value = @tables_id[key.to_s] + end - return new_value - end + value + end + def fill_empty_column(value) + if value.to_s.strip.empty? + :DEFAULT + else value end + end - def sniff_delimiter(file) - - return nil if File.size?(file).nil? - - first_line = File.open(file) {|f| f.readline} + def replace_uuid(table_name, column_name, value) + @tables_id["#{table_name}_id"] = SecureRandom.uuid if column_name == 'id' - return nil if first_line.nil? + if ROUND_TRIP_EXPORT_IMPORT_MAP[table_name.to_sym] && ROUND_TRIP_EXPORT_IMPORT_MAP[table_name.to_sym][column_name.to_sym] + key = ROUND_TRIP_EXPORT_IMPORT_MAP[table_name.to_sym][column_name.to_sym] - sniff = {} + new_value = if key.equal?(:generate) + SecureRandom.uuid + else + @tables_id[key.to_s] + end - DELIMITERS.each do |delimiter| - sniff[delimiter] = first_line.count(delimiter) + if new_value.nil? + new_value = SecureRandom.uuid + @tables_id[key.to_s] = new_value end - - sniff = sniff.sort {|a,b| b[1]<=>a[1]} - sniff.size > 0 ? 
sniff[0][0] : nil + return new_value end - # helper methods that set up killbill and database options: load_config_from_file; set_config; set_database_options; - # set_killbill_options; - def load_config_from_file(config_file) + unless ROUND_TRIP_EXPORT_IMPORT_MAP[:all][column_name.to_sym].nil? + key = ROUND_TRIP_EXPORT_IMPORT_MAP[:all][column_name.to_sym] + new_value = @tables_id[key.to_s] - set_config(config_file) + return new_value + end - if not @config.nil? - config_killbill = @config['killbill'] + value + end - if not config_killbill.nil? - set_killbill_options([config_killbill['api_key'],config_killbill['api_secret']], - [config_killbill['user'],config_killbill['password']], - "http://#{config_killbill['host']}:#{config_killbill['port']}") - end + def b64_decode_if_needed(input) + # Exclude nil or non string + return input if input.nil? || !input.is_a?(String) + # Apply regex to check that string is built as a B64 string: the character set is [A-Z, a-z, 0-9, and + /] + # and if the rest length is less than 4, the string is padded with '=' characters. + return input if input.match(B64_REGEX).nil? - config_db = @config['database'] + # Decode + result = Base64.decode64(input) + # Verify encoded of the decoded value == input prior return result + return input if Base64.strict_encode64(result) != input - if not config_db.nil? - set_database_options(config_db['host'],config_db['name'], - [config_db['username'],config_db['password']], - @logger) + Blob.new(result, TMP_DIR) + end - end - end - end + def sniff_delimiter(file) + return nil if File.size?(file).nil? - def set_config(config_file = nil) - @config = nil + first_line = File.open(file, &:readline) - if not config_file.nil? - if not Dir[config_file][0].nil? - @config = YAML::load_file(config_file) - end - end + return nil if first_line.nil? 
+ sniff = {} + + DELIMITERS.each do |delimiter| + sniff[delimiter] = first_line.count(delimiter) end - def set_database_options(database_host = nil, database_port = nil, database_name = nil, database_credentials = nil, logger) + sniff = sniff.sort { |a, b| b[1] <=> a[1] } + !sniff.empty? ? sniff[0][0] : nil + end - Database.set_logger(logger) + def load_config_from_file(config_file) + self.config = config_file - Database.set_credentials(database_credentials[0],database_credentials[1]) unless database_credentials.nil? - Database.set_database_name(database_name) unless database_name.nil? - Database.set_host(database_host) unless database_host.nil? - Database.set_port(database_port) unless database_port.nil? + return if @config.nil? - Database.set_mysql_command_line + config_killbill = @config['killbill'] + + unless config_killbill.nil? + set_killbill_options([config_killbill['api_key'], config_killbill['api_secret']], + [config_killbill['user'], config_killbill['password']], + "http://#{config_killbill['host']}:#{config_killbill['port']}") end - def set_killbill_options(killbill_api_credentials, killbill_credentials, killbill_url) + config_db = @config['database'] - if not killbill_api_credentials.nil? + @database = Database.new(config_db['name'], config_db['host'], config_db['port'], config_db['username'], config_db['password'], @logger) unless config_db.nil? + end - @killbill_api_key = killbill_api_credentials[0] - @killbill_api_secrets = killbill_api_credentials[1] + def config=(config_file = nil) + @config = nil - end + return if config_file.nil? - if not killbill_credentials.nil? + @config = YAML.load_file(config_file) unless Dir[config_file][0].nil? + end - @killbill_user = killbill_credentials[0] - @killbill_password = killbill_credentials[1] + def set_killbill_options(killbill_api_credentials, killbill_credentials, killbill_url) + unless killbill_api_credentials.nil? 
- end + @killbill_api_key = killbill_api_credentials[0] + @killbill_api_secret = killbill_api_credentials[1] - if not killbill_url.nil? + end - @killbill_url = killbill_url + unless killbill_credentials.nil? + + @killbill_user = killbill_credentials[0] + @killbill_password = killbill_credentials[1] - end end + @killbill_url = killbill_url unless killbill_url.nil? + end end - -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/base_artifact.rb b/kpm/lib/kpm/base_artifact.rb index 25d5107a..f870c080 100644 --- a/kpm/lib/kpm/base_artifact.rb +++ b/kpm/lib/kpm/base_artifact.rb @@ -1,8 +1,9 @@ +# frozen_string_literal: true + require 'digest/sha1' require 'rexml/document' module KPM - class ArtifactCorruptedException < IOError def message 'Downloaded artifact failed checksum verification' @@ -34,34 +35,34 @@ class BaseArtifact KAUI_CLASSIFIER = nil class << self - def pull(logger, group_id, artifact_id, packaging='jar', classifier=nil, version='LATEST', destination_path=nil, sha1_file=nil, force_download=false, verify_sha1=true, overrides={}, ssl_verify=true) - coordinate_map = {:group_id => group_id, :artifact_id => artifact_id, :packaging => packaging, :classifier => classifier, :version => version} + def pull(logger, group_id, artifact_id, packaging = 'jar', classifier = nil, version = 'LATEST', destination_path = nil, sha1_file = nil, force_download = false, verify_sha1 = true, overrides = {}, ssl_verify = true) + coordinate_map = { group_id: group_id, artifact_id: artifact_id, packaging: packaging, classifier: classifier, version: version } pull_and_put_in_place(logger, coordinate_map, nil, destination_path, false, sha1_file, force_download, verify_sha1, overrides, ssl_verify) end - def pull_from_fs(logger, file_path, destination_path=nil) + def pull_from_fs(logger, file_path, destination_path = nil) pull_from_fs_and_put_in_place(logger, file_path, destination_path) end - def nexus_remote(overrides={}, ssl_verify=true, logger=nil) + def 
nexus_remote(overrides = {}, ssl_verify = true, logger = nil) # overrides typically comes from the kpm.yml where we expect keys as String - overrides_sym = (overrides || {}).each_with_object({}) {|(k,v), h| h[k.to_sym] = v} + overrides_sym = (overrides || {}).each_with_object({}) { |(k, v), h| h[k.to_sym] = v } nexus_config = nexus_defaults.merge(overrides_sym) - nexus_remote ||= KPM::NexusFacade::RemoteFactory.create(nexus_config, ssl_verify, logger) + KPM::NexusFacade::RemoteFactory.create(nexus_config, ssl_verify, logger) end def nexus_defaults { - url: 'https://oss.sonatype.org', - repository: 'releases' + url: 'https://oss.sonatype.org', + repository: 'releases' } end protected - def pull_and_put_in_place(logger, coordinate_map, plugin_name, destination_path=nil, skip_top_dir=true, sha1_file=nil, force_download=false, verify_sha1=true, overrides={}, ssl_verify=true) + def pull_and_put_in_place(logger, coordinate_map, plugin_name, destination_path = nil, skip_top_dir = true, sha1_file = nil, force_download = false, verify_sha1 = true, overrides = {}, ssl_verify = true) # Build artifact info - artifact_info = artifact_info(logger, coordinate_map, overrides, ssl_verify) + artifact_info = artifact_info(logger, coordinate_map, sha1_file, force_download, overrides, ssl_verify) artifact_info[:plugin_name] = plugin_name populate_fs_info(artifact_info, destination_path) @@ -104,7 +105,7 @@ def pull_and_put_in_place(logger, coordinate_map, plugin_name, destination_path= logger.info " Starting download of #{coordinates} to #{tmp_destination_dir}" downloaded_artifact_info = pull_and_verify(logger, artifact_info[:sha1], coordinates, tmp_destination_dir, sha1_file, verify_sha1, overrides, ssl_verify) - remove_old_default_bundles(coordinate_map,artifact_info,downloaded_artifact_info) + remove_old_default_bundles(coordinate_map, artifact_info, downloaded_artifact_info) if artifact_info[:is_tgz] artifact_info[:bundle_dir] = 
Utils.unpack_tgz(downloaded_artifact_info[:file_path], artifact_info[:dir_name], skip_top_dir) FileUtils.rm downloaded_artifact_info[:file_path] @@ -119,11 +120,11 @@ def pull_and_put_in_place(logger, coordinate_map, plugin_name, destination_path= end # Logic similar than pull_and_put_in_place above - def pull_from_fs_and_put_in_place(logger, file_path, destination_path=nil) + def pull_from_fs_and_put_in_place(logger, file_path, destination_path = nil) artifact_info = { - :skipped => false, - :repository_path => file_path, - :is_tgz => file_path.end_with?('.tar.gz') || file_path.end_with?('.tgz') + skipped: false, + repository_path: file_path, + is_tgz: file_path.end_with?('.tar.gz') || file_path.end_with?('.tgz') } populate_fs_info(artifact_info, destination_path) @@ -148,7 +149,7 @@ def skip_if_exists(artifact_info, coordinates, sha1_file) return false if artifact_info[:sha1].nil? # If there is no such sha1_file, we don't skip - return false if sha1_file.nil? || !File.exists?(sha1_file) + return false if sha1_file.nil? || !File.exist?(sha1_file) # # At this point we have a valid sha1_file and a remote sha1 @@ -168,19 +169,25 @@ def skip_if_exists(artifact_info, coordinates, sha1_file) end end - def artifact_info(logger, coordinate_map, overrides={}, ssl_verify=true) + def artifact_info(logger, coordinate_map, sha1_file = nil, force_download = false, overrides = {}, ssl_verify = true) info = { - :skipped => false + skipped: false } + sha1_checker = sha1_file ? 
Sha1Checker.from_file(sha1_file) : nil + coordinates = KPM::Coordinates.build_coordinates(coordinate_map) begin nexus_info = nexus_remote(overrides, ssl_verify, logger).get_artifact_info(coordinates) rescue KPM::NexusFacade::ArtifactMalformedException => e - raise StandardError.new("Invalid coordinates #{coordinate_map}") + raise StandardError, "Invalid coordinates #{coordinate_map}: #{e}" rescue StandardError => e - logger.warn("Unable to retrieve coordinates #{coordinate_map}") - raise e + logger.warn("Unable to retrieve coordinates #{coordinate_map}: #{e}") + cached_coordinates = sha1_checker ? sha1_checker.artifact_info(coordinates) : nil + raise e if force_download || !cached_coordinates + + # Use the cache + return cached_coordinates end xml = REXML::Document.new(nexus_info) @@ -189,12 +196,14 @@ def artifact_info(logger, coordinate_map, overrides={}, ssl_verify=true) info[:repository_path] = xml.elements['//repositoryPath'].text unless xml.elements['//repositoryPath'].nil? info[:is_tgz] = info[:repository_path].end_with?('.tar.gz') || info[:repository_path].end_with?('.tgz') + sha1_checker.cache_artifact_info(coordinates, info) if sha1_checker + info end def update_destination_path(info, destination_path) # In case LATEST was specified, use the actual version as the directory name - destination_path = KPM::root if destination_path.nil? + destination_path = KPM.root if destination_path.nil? plugin_dir, version_dir = File.split(destination_path) destination_path = Pathname.new(plugin_dir).join(info[:version]).to_s if version_dir == 'LATEST' && !info[:version].nil? 
destination_path @@ -221,13 +230,14 @@ def populate_fs_info(info, destination_path) destination_path end - def pull_and_verify(logger, remote_sha1, coordinates, destination_dir, sha1_file, verify_sha1, overrides={}, ssl_verify=true) + def pull_and_verify(logger, remote_sha1, coordinates, destination_dir, sha1_file, verify_sha1, overrides = {}, ssl_verify = true) info = nexus_remote(overrides, ssl_verify, logger).pull_artifact(coordinates, destination_dir) # Always verify sha1 and if incorrect either throw or log when we are asked to bypass sha1 verification verified = verify(logger, coordinates, info[:file_path], remote_sha1) - if !verified + unless verified raise ArtifactCorruptedException if verify_sha1 + logger.warn("Skip sha1 verification for #{coordinates}") end @@ -248,13 +258,10 @@ def verify(logger, coordinates, file_path, remote_sha1) local_sha1 = Digest::SHA1.file(file_path).hexdigest res = local_sha1 == remote_sha1 - if !res - logger.warn("Sha1 verification failed for #{coordinates} : local_sha1 = #{local_sha1}, remote_sha1 = #{remote_sha1}") - end + logger.warn("Sha1 verification failed for #{coordinates} : local_sha1 = #{local_sha1}, remote_sha1 = #{remote_sha1}") unless res res end - # Magic methods... 
def path_looks_like_a_directory(path) @@ -265,12 +272,12 @@ def path_looks_like_a_directory(path) last_part = File.basename(path).downcase - %w(.pom .xml .war .jar .xsd .tar.gz .tgz .gz .zip).each do |classic_file_extension| + %w[.pom .xml .war .jar .xsd .tar.gz .tgz .gz .zip].each do |classic_file_extension| return false if last_part.end_with?(classic_file_extension) end # Known magic files - %w(root).each do |classic_filename| + %w[root].each do |classic_filename| return false if last_part == classic_filename end @@ -286,12 +293,9 @@ def remove_old_default_bundles(coordinate_map, artifact_info, downloaded_artifac existing_default_bundles.each do |bundle| bundle_name = Utils.get_plugin_name_from_file_path(bundle) - is_downloaded = downloaded_default_bundles.index {|file_name| file_name.include? bundle_name} - unless is_downloaded.nil? - FileUtils.remove(bundle) - end + is_downloaded = downloaded_default_bundles.index { |file_name| file_name.include? bundle_name } + FileUtils.remove(bundle) unless is_downloaded.nil? 
end - end end end diff --git a/kpm/lib/kpm/base_installer.rb b/kpm/lib/kpm/base_installer.rb index 3502c9b4..39d4946d 100644 --- a/kpm/lib/kpm/base_installer.rb +++ b/kpm/lib/kpm/base_installer.rb @@ -1,9 +1,10 @@ +# frozen_string_literal: true + require 'pathname' require 'zip' module KPM class BaseInstaller - LATEST_VERSION = 'LATEST' SHA1_FILENAME = 'sha1.yml' DEFAULT_BUNDLES_DIR = Pathname.new('/var').join('tmp').join('bundles').to_s @@ -15,13 +16,13 @@ def initialize(logger, nexus_config = nil, nexus_ssl_verify = nil) @trace_logger = KPM::TraceLogger.new end - def install_killbill_server(specified_group_id=nil, specified_artifact_id=nil, specified_packaging=nil, specified_classifier=nil, specified_version=nil, specified_webapp_path=nil, bundles_dir=nil, force_download=false, verify_sha1=true) + def install_killbill_server(specified_group_id = nil, specified_artifact_id = nil, specified_packaging = nil, specified_classifier = nil, specified_version = nil, specified_webapp_path = nil, bundles_dir = nil, force_download = false, verify_sha1 = true) group_id = specified_group_id || KPM::BaseArtifact::KILLBILL_GROUP_ID artifact_id = specified_artifact_id || KPM::BaseArtifact::KILLBILL_ARTIFACT_ID packaging = specified_packaging || KPM::BaseArtifact::KILLBILL_PACKAGING classifier = specified_classifier || KPM::BaseArtifact::KILLBILL_CLASSIFIER version = specified_version || LATEST_VERSION - webapp_path = specified_webapp_path || KPM::root + webapp_path = specified_webapp_path || KPM.root bundles_dir = Pathname.new(bundles_dir || DEFAULT_BUNDLES_DIR).expand_path sha1_file = "#{bundles_dir}/#{SHA1_FILENAME}" @@ -32,60 +33,55 @@ def install_killbill_server(specified_group_id=nil, specified_artifact_id=nil, s @logger.debug("Installing Kill Bill server: group_id=#{group_id} artifact_id=#{artifact_id} packaging=#{packaging} classifier=#{classifier} version=#{version} webapp_path=#{webapp_path}") artifact_info = KPM::KillbillServerArtifact.pull(@logger, - group_id, - 
artifact_id, - packaging, - classifier, - version, - webapp_path, - sha1_file, - force_download, - verify_sha1, - @nexus_config, - @nexus_ssl_verify) + group_id, + artifact_id, + packaging, + classifier, + version, + webapp_path, + sha1_file, + force_download, + verify_sha1, + @nexus_config, + @nexus_ssl_verify) # store trace info to be returned as JSON by the KPM::Installer.install method - @trace_logger.add('killbill', - artifact_info.merge({'status'=> (artifact_info[:skipped] ? 'UP_TO_DATE': 'INSTALLED'), - :group_id => group_id, :artifact_id => artifact_id, :packaging => packaging, :classifier => classifier})) + @trace_logger.add('killbill', nil, + artifact_info.merge('status' => (artifact_info[:skipped] ? 'UP_TO_DATE' : 'INSTALLED'), + :group_id => group_id, :artifact_id => artifact_id, :packaging => packaging, :classifier => classifier)) end - def install_kaui(specified_group_id=nil, specified_artifact_id=nil, specified_packaging=nil, specified_classifier=nil, specified_version=nil, specified_webapp_path=nil, bundles_dir=nil, force_download=false, verify_sha1=true) + def install_kaui(specified_group_id = nil, specified_artifact_id = nil, specified_packaging = nil, specified_classifier = nil, specified_version = nil, specified_webapp_path = nil, bundles_dir = nil, force_download = false, verify_sha1 = true) group_id = specified_group_id || KPM::BaseArtifact::KAUI_GROUP_ID artifact_id = specified_artifact_id || KPM::BaseArtifact::KAUI_ARTIFACT_ID packaging = specified_packaging || KPM::BaseArtifact::KAUI_PACKAGING classifier = specified_classifier || KPM::BaseArtifact::KAUI_CLASSIFIER version = specified_version || LATEST_VERSION - webapp_path = specified_webapp_path || KPM::root + webapp_path = specified_webapp_path || KPM.root bundles_dir = Pathname.new(bundles_dir || DEFAULT_BUNDLES_DIR).expand_path sha1_file = "#{bundles_dir}/#{SHA1_FILENAME}" @logger.debug("Installing Kaui: group_id=#{group_id} artifact_id=#{artifact_id} packaging=#{packaging} 
classifier=#{classifier} version=#{version} webapp_path=#{webapp_path}") artifact_info = KPM::KauiArtifact.pull(@logger, - group_id, - artifact_id, - packaging, - classifier, - version, - webapp_path, - sha1_file, - force_download, - verify_sha1, - @nexus_config, - @nexus_ssl_verify) + group_id, + artifact_id, + packaging, + classifier, + version, + webapp_path, + sha1_file, + force_download, + verify_sha1, + @nexus_config, + @nexus_ssl_verify) # store trace info to be returned as JSON by the KPM::Installer.install method - @trace_logger.add('kaui', - artifact_info.merge({'status'=> (artifact_info[:skipped] ? 'UP_TO_DATE': 'INSTALLED'), - :group_id => group_id, :artifact_id => artifact_id, :packaging => packaging, :classifier => classifier})) - - + @trace_logger.add('kaui', nil, + artifact_info.merge('status' => (artifact_info[:skipped] ? 'UP_TO_DATE' : 'INSTALLED'), + :group_id => group_id, :artifact_id => artifact_id, :packaging => packaging, :classifier => classifier)) end - def install_plugin(plugin_key, raw_kb_version=nil, specified_group_id=nil, specified_artifact_id=nil, specified_packaging=nil, specified_classifier=nil, specified_version=nil, bundles_dir=nil, specified_type=nil, force_download=false, verify_sha1=true, verify_jruby_jar=false) - + def install_plugin(plugin_key, raw_kb_version = nil, specified_group_id = nil, specified_artifact_id = nil, specified_packaging = nil, specified_classifier = nil, specified_version = nil, bundles_dir = nil, specified_type = nil, force_download = false, verify_sha1 = true, verify_jruby_jar = false) # plugin_key needs to exist - if plugin_key.nil? - raise ArgumentError.new 'Aborting installation: User needs to specify a pluginKey' - end + raise ArgumentError, 'Aborting installation: User needs to specify a pluginKey' if plugin_key.nil? 
# Lookup artifact and perform validation against input looked_up_group_id, looked_up_artifact_id, looked_up_packaging, looked_up_classifier, looked_up_version, looked_up_type = KPM::PluginsDirectory.lookup(plugin_key, true, raw_kb_version) @@ -95,21 +91,17 @@ def install_plugin(plugin_key, raw_kb_version=nil, specified_group_id=nil, speci validate_installation_arg!(plugin_key, 'type', specified_type, looked_up_type) validate_installation_arg!(plugin_key, 'classifier', specified_classifier, looked_up_classifier) - # If there is no entry in plugins_directory.yml and the group_id is not the killbill default group_id, the key provided must be a user key and must have a namespace if looked_up_artifact_id.nil? && - specified_group_id != KPM::BaseArtifact::KILLBILL_JAVA_PLUGIN_GROUP_ID && - specified_group_id != KPM::BaseArtifact::KILLBILL_RUBY_PLUGIN_GROUP_ID && - plugin_key.split(':').size == 1 - raise ArgumentError.new "Aborting installation: pluginKey = #{plugin_key} does not exist in plugin_directory.yml so format of the key must have a user namespace (e.g namespace:key)" + specified_group_id != KPM::BaseArtifact::KILLBILL_JAVA_PLUGIN_GROUP_ID && + specified_group_id != KPM::BaseArtifact::KILLBILL_RUBY_PLUGIN_GROUP_ID && + plugin_key.split(':').size == 1 + raise ArgumentError, "Aborting installation: pluginKey = #{plugin_key} does not exist in plugin_directory.yml so format of the key must have a user namespace (e.g namespace:key)" end - # Specified parameters have always precedence except for the artifact_id (to map stripe to stripe-plugin) artifact_id = looked_up_artifact_id || specified_artifact_id - if artifact_id.nil? - raise ArgumentError.new "Aborting installation: unable to lookup plugin #{specified_artifact_id}" - end + raise ArgumentError, "Aborting installation: unable to lookup plugin #{specified_artifact_id}" if artifact_id.nil? 
bundles_dir = Pathname.new(bundles_dir || DEFAULT_BUNDLES_DIR).expand_path plugins_dir = bundles_dir.join('plugins') @@ -136,10 +128,9 @@ def install_plugin(plugin_key, raw_kb_version=nil, specified_group_id=nil, speci _, plugin_name = plugins_manager.get_plugin_key_and_name(plugin_key) # Before we do the install we verify that the entry we have in the plugin_identifiers.json matches our current request - coordinate_map = {:group_id => group_id, :artifact_id => artifact_id, :packaging => packaging, :classifier => classifier} + coordinate_map = { group_id: group_id, artifact_id: artifact_id, packaging: packaging, classifier: classifier } validate_plugin_key!(plugins_dir, plugin_key, coordinate_map) - @logger.debug("Installing plugin: group_id=#{group_id} artifact_id=#{artifact_id} packaging=#{packaging} classifier=#{classifier} version=#{version} destination=#{destination}") artifact_info = KPM::KillbillPluginArtifact.pull(@logger, group_id, @@ -156,8 +147,8 @@ def install_plugin(plugin_key, raw_kb_version=nil, specified_group_id=nil, speci @nexus_ssl_verify) # store trace info to be returned as JSON by the KPM::Installer.install method @trace_logger.add('plugins', plugin_key, - artifact_info.merge({'status'=> (artifact_info[:skipped] ? 'UP_TO_DATE': 'INSTALLED'), - :group_id => group_id, :artifact_id => artifact_id, :packaging => packaging, :classifier => classifier})) + artifact_info.merge('status' => (artifact_info[:skipped] ? 
'UP_TO_DATE' : 'INSTALLED'), + :group_id => group_id, :artifact_id => artifact_id, :packaging => packaging, :classifier => classifier)) # Update with resolved version coordinate_map[:version] = artifact_info[:version] @@ -169,14 +160,19 @@ def install_plugin(plugin_key, raw_kb_version=nil, specified_group_id=nil, speci artifact_info end + def install_plugin_from_fs(plugin_key, raw_file_path, name, version, bundles_dir = nil, type = 'java') + # Expand glob if needed + file_paths = Dir.glob(raw_file_path) + raise ArgumentError, "Cannot install plugin: no file found at #{raw_file_path}" if file_paths.empty? + raise ArgumentError, "Cannot install plugin: multiple files found at #{raw_file_path}" if file_paths.size > 1 + + file_path = file_paths[0] - def install_plugin_from_fs(plugin_key, file_path, name, version, bundles_dir=nil, type='java') bundles_dir = Pathname.new(bundles_dir || DEFAULT_BUNDLES_DIR).expand_path plugins_dir = bundles_dir.join('plugins') - if version.nil? - version = Utils.get_version_from_file_path(file_path) - end + version = Utils.get_version_from_file_path(file_path) if version.nil? + raise ArgumentError, 'Cannot install plugin: missing version' if version.nil? if type.to_s == 'java' plugin_name = name.nil? ? 
Utils.get_plugin_name_from_file_path(file_path) : name @@ -194,28 +190,26 @@ def install_plugin_from_fs(plugin_key, file_path, name, version, bundles_dir=nil # store trace info to be returned as JSON by the KPM::Installer.install method @trace_logger.add('plugins', plugin_key, - artifact_info.merge({'status'=>'INSTALLED'})) + artifact_info.merge('status' => 'INSTALLED')) artifact_info end - def uninstall_plugin(plugin_name_or_key, plugin_version=nil, bundles_dir=nil) + def uninstall_plugin(plugin_name_or_key, plugin_version = nil, bundles_dir = nil) bundles_dir = Pathname.new(bundles_dir || DEFAULT_BUNDLES_DIR).expand_path plugins_dir = bundles_dir.join('plugins') plugins_manager = PluginsManager.new(plugins_dir, @logger) plugin_key, plugin_name = plugins_manager.get_plugin_key_and_name(plugin_name_or_key) - if plugin_name.nil? - raise ArgumentError.new "Cannot uninstall plugin: Unknown plugin name or plugin key = #{plugin_name_or_key}" - end + raise ArgumentError, "Cannot uninstall plugin: Unknown plugin name or plugin key = #{plugin_name_or_key}" if plugin_name.nil? modified = plugins_manager.uninstall(plugin_name, plugin_version || :all) plugins_manager.remove_plugin_identifier_key(plugin_key) modified end - def install_default_bundles(bundles_dir, specified_version=nil, kb_version=nil, force_download=false, verify_sha1=true) + def install_default_bundles(bundles_dir, specified_version = nil, kb_version = nil, force_download = false, verify_sha1 = true) group_id = 'org.kill-bill.billing' artifact_id = 'killbill-platform-osgi-bundles-defaultbundles' packaging = 'tar.gz' @@ -223,7 +217,8 @@ def install_default_bundles(bundles_dir, specified_version=nil, kb_version=nil, version = specified_version if version.nil? 
|| version == LATEST_VERSION - info = KPM::KillbillServerArtifact.info(kb_version || LATEST_VERSION, @nexus_config, @nexus_ssl_verify) + sha1_file = "#{bundles_dir}/#{SHA1_FILENAME}" + info = KPM::KillbillServerArtifact.info(kb_version || LATEST_VERSION, sha1_file, force_download, verify_sha1, @nexus_config, @nexus_ssl_verify) version = info['killbill-platform'] end version ||= LATEST_VERSION @@ -246,15 +241,13 @@ def install_default_bundles(bundles_dir, specified_version=nil, kb_version=nil, @nexus_config, @nexus_ssl_verify) - @trace_logger.add('default_bundles', - info.merge({'status'=> (info[:skipped] ? 'UP_TO_DATE': 'INSTALLED'), - :group_id => group_id, :artifact_id => artifact_id, :packaging => packaging, :classifier => classifier})) + @trace_logger.add('default_bundles', nil, + info.merge('status' => (info[:skipped] ? 'UP_TO_DATE' : 'INSTALLED'), + :group_id => group_id, :artifact_id => artifact_id, :packaging => packaging, :classifier => classifier)) # The special JRuby bundle needs to be called jruby.jar # TODO .first - code smell - unless info[:skipped] - File.rename Dir.glob("#{destination}/killbill-platform-osgi-bundles-jruby-*.jar").first, destination.join('jruby.jar') - end + File.rename Dir.glob("#{destination}/killbill-platform-osgi-bundles-jruby-*.jar").first, destination.join('jruby.jar') unless info[:skipped] info end @@ -262,21 +255,16 @@ def install_default_bundles(bundles_dir, specified_version=nil, kb_version=nil, private def validate_installation_arg!(plugin_key, arg_type, specified_arg, looked_up_arg) - # If nothing was specified, or if we don't find anything from the lookup, nothing to validate against - if specified_arg.nil? || looked_up_arg.nil? - return - end + return if specified_arg.nil? || looked_up_arg.nil? 
- if specified_arg.to_s != looked_up_arg.to_s - raise ArgumentError.new "Aborting installation for plugin_key #{plugin_key}: specified value #{specified_arg} for #{arg_type} does not match looked_up value #{looked_up_arg}" - end + raise ArgumentError, "Aborting installation for plugin_key #{plugin_key}: specified value #{specified_arg} for #{arg_type} does not match looked_up value #{looked_up_arg}" if specified_arg.to_s != looked_up_arg.to_s end def validate_plugin_key!(plugins_dir, plugin_key, coordinate_map) plugins_manager = PluginsManager.new(plugins_dir, @logger) res = plugins_manager.validate_plugin_identifier_key(plugin_key, coordinate_map) - raise ArgumentError.new "Failed to validate plugin key #{plugin_key}" if !res + raise ArgumentError, "Failed to validate plugin key #{plugin_key}" unless res end def update_plugin_identifier(plugins_dir, plugin_key, type, coordinate_map, artifact_info) @@ -288,7 +276,7 @@ def update_plugin_identifier(plugins_dir, plugin_key, type, coordinate_map, arti plugins_manager.add_plugin_identifier_key(plugin_key, plugin_name, type, coordinate_map) end - def mark_as_active(plugins_dir, artifact_info, artifact_id=nil) + def mark_as_active(plugins_dir, artifact_info, _artifact_id = nil) # Mark this bundle as active plugins_manager = PluginsManager.new(plugins_dir, @logger) plugins_manager.set_active(artifact_info[:bundle_dir]) @@ -297,14 +285,14 @@ def mark_as_active(plugins_dir, artifact_info, artifact_id=nil) def warn_if_jruby_jar_missing(bundles_dir) platform_dir = bundles_dir.join('platform') jruby_jar = platform_dir.join('jruby.jar') - if !File.exists?(jruby_jar) - @logger.warn(" Missing installation for jruby.jar under #{platform_dir}. This is required for ruby plugin installation"); + if !File.exist?(jruby_jar) + @logger.warn(" Missing installation for jruby.jar under #{platform_dir}. 
This is required for ruby plugin installation") else version = extract_jruby_jar_version(jruby_jar) if version @logger.info(" Detected jruby.jar version #{version}") else - @logger.warn(" Failed to detect jruby.jar version for #{jruby_jar}"); + @logger.warn(" Failed to detect jruby.jar version for #{jruby_jar}") end end end @@ -319,10 +307,9 @@ def extract_jruby_jar_version(jruby_jar) if selected_entries && selected_entries.size == 1 zip_entry = selected_entries[0] content = zip_entry.get_input_stream.read - return content.split("\n").select { |e| e.start_with?("version")}[0].split("=")[1] + return content.split("\n").select { |e| e.start_with?('version') }[0].split('=')[1] end nil end - end end diff --git a/kpm/lib/kpm/blob.rb b/kpm/lib/kpm/blob.rb new file mode 100644 index 00000000..047aedbc --- /dev/null +++ b/kpm/lib/kpm/blob.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +module KPM + class Blob + def initialize(value, tmp_dir) + @tmp_dir = tmp_dir + @blob_file = @tmp_dir + File::SEPARATOR + rand.to_s + store_value(value) + end + + # On Macos systems, this will require defining a `secure_file_priv` config: + # + # e.g /usr/local/etc/my.cnf : + # [mysqld] + # ... 
+ # secure_file_priv="" + def value + "LOAD_FILE(\"#{@blob_file}\")" + end + + private + + def store_value(value) + File.open(@blob_file, 'wb') do |file| + file.write(value) + end + end + end +end diff --git a/kpm/lib/kpm/cli.rb b/kpm/lib/kpm/cli.rb index 09e3471d..a4a93168 100644 --- a/kpm/lib/kpm/cli.rb +++ b/kpm/lib/kpm/cli.rb @@ -1,7 +1,9 @@ +# frozen_string_literal: true + require 'thor' module KPM class Cli < Thor include KPM::Tasks end -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/coordinates.rb b/kpm/lib/kpm/coordinates.rb index 9f311543..3b51d6ee 100644 --- a/kpm/lib/kpm/coordinates.rb +++ b/kpm/lib/kpm/coordinates.rb @@ -1,9 +1,8 @@ -module KPM +# frozen_string_literal: true +module KPM class Coordinates - class << self - def build_coordinates(coordinate_map) group_id = coordinate_map[:group_id] artifact_id = coordinate_map[:artifact_id] @@ -26,15 +25,13 @@ def get_coordinate_map(entry) parts = entry.split(':') length = parts.size if length == 3 - {:group_id => parts[0], :artifact_id => parts[1], :packaging => parts[2]} + { group_id: parts[0], artifact_id: parts[1], packaging: parts[2] } elsif length == 4 - {:group_id => parts[0], :artifact_id => parts[1], :packaging => parts[2], :version => parts[3]} + { group_id: parts[0], artifact_id: parts[1], packaging: parts[2], version: parts[3] } elsif length == 5 - {:group_id => parts[0], :artifact_id => parts[1], :packaging => parts[2], :classifier => parts[3], :version => parts[4]} + { group_id: parts[0], artifact_id: parts[1], packaging: parts[2], classifier: parts[3], version: parts[4] } end end - end - end -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/database.rb b/kpm/lib/kpm/database.rb index c5bd2768..4f1cc53a 100644 --- a/kpm/lib/kpm/database.rb +++ b/kpm/lib/kpm/database.rb @@ -1,131 +1,120 @@ +# frozen_string_literal: true + require 'tmpdir' module KPM - class Database - class << self - - # Mysql Information functions - LAST_INSERTED_ID = 'SELECT LAST_INSERT_ID();' - 
ROWS_UPDATED = 'SELECT ROW_COUNT();' - - # Destination database - DATABASE = ENV['DATABASE'] || 'killbill' - USERNAME = ENV['USERNAME'] || 'root' - PASSWORD = ENV['PASSWORD'] || 'root' - HOST = ENV['HOST'] || 'localhost' - PORT = ENV['PORT'] || '3306' - - COLUMN_NAME_POS = 3 - - STATEMENT_TMP_FILE = Dir.mktmpdir('statement') + File::SEPARATOR + 'statement.sql' - - MYSQL_COMMAND_LINE = "mysql #{DATABASE} --user=#{USERNAME} --password=#{PASSWORD} " - - @@mysql_command_line = MYSQL_COMMAND_LINE - @@username = USERNAME - @@password = PASSWORD - @@database = DATABASE - @@host = HOST - @@port = PORT - - def set_logger(logger) - @@logger = logger - end - - def set_credentials(user = nil, password = nil) - @@username = user - @@password = password - end - - def set_host(host) - @@host = host - end - - def set_port(port) - @@port = port - end + # Mysql Information functions + LAST_INSERTED_ID = 'SELECT LAST_INSERT_ID();' + ROWS_UPDATED = 'SELECT ROW_COUNT();' + + # Destination database + DATABASE = ENV['DATABASE'] || 'killbill' + USERNAME = ENV['USERNAME'] || 'root' + PASSWORD = ENV['PASSWORD'] || 'root' + HOST = ENV['HOST'] || 'localhost' + PORT = ENV['PORT'] || '3306' + + COLUMN_NAME_POS = 3 + + STATEMENT_TMP_FILE = Dir.mktmpdir('statement') + File::SEPARATOR + 'statement.sql' + + def initialize(database_name, host, port, username, password, logger) + @database_name = database_name || DATABASE + @host = host || HOST + @port = port || PORT + @username = username || USERNAME + @password = password || PASSWORD + @mysql_command_line = "mysql --max_allowed_packet=128M #{@database_name} --host=#{@host} --port=#{@port} --user=#{@username} --password=#{@password} " + + @logger = logger + end - def set_database_name(database_name = nil) - @@database = database_name - end + def execute_insert_statement(table_name, query, qty_to_insert, _table_data, record_id = nil) + query = "set #{record_id[:variable]}=#{record_id[:value]}; #{query}" unless record_id.nil? 
+ query = "SET sql_mode = ''; SET autocommit=0; #{query} COMMIT; SHOW WARNINGS;" - def set_mysql_command_line - @@mysql_command_line = "mysql #{@@database} --host=#{@@host} --port=#{@@port} --user=#{@@username} --password=#{@@password} " + File.open(STATEMENT_TMP_FILE, 'w') do |s| + s.puts query end - def execute_insert_statement(table_name, query, qty_to_insert, table_data, record_id = nil) - - unless record_id.nil? - query = "set #{record_id[:variable]}=#{record_id[:value]}; #{query}" - end - query = "SET autocommit=0; #{query} COMMIT;" - - File.open(STATEMENT_TMP_FILE,'w') do |s| - s.puts query - end + response = `#{@mysql_command_line} < "#{STATEMENT_TMP_FILE}" 2>&1` - response = `#{@@mysql_command_line} < "#{STATEMENT_TMP_FILE}" 2>&1` - - if response.include? 'ERROR' - @@logger.error "\e[91;1mTransaction that fails to be executed\e[0m" - @@logger.error "\e[91m#{query}\e[0m" + if response.include? 'ERROR' + @logger.error "\e[91;1mTransaction that fails to be executed (first 1,000 chars)\e[0m" + # Queries can be really big (bulk imports) + @logger.error "\e[91m#{query[0..1000]}\e[0m" + if response.include?('Table') && response.include?('doesn\'t exist') + @logger.warn "Skipping unknown table #{table_name}...." + else raise Interrupt, "Importing table #{table_name}...... \e[91;1m#{response}\e[0m" end + end - if response.include? 'LAST_INSERT_ID' - @@logger.info "\e[32mImporting table #{table_name}...... Row 1 of #{qty_to_insert} success\e[0m" - - return response.split("\n")[1] - end + if response.include? 'LAST_INSERT_ID' + @logger.info "\e[32mImporting table #{table_name}...... Row 1 of #{qty_to_insert} success\e[0m" - if response.include? 'ROW_COUNT' - response_msg = response.split("\n") - row_count_inserted = response_msg[response_msg.size - 1] - @@logger.info "\e[32mImporting table #{table_name}...... Row #{ row_count_inserted || 1} of #{qty_to_insert} success\e[0m" + return response.split("\n")[1] + end - return true + if response.include? 
'ROW_COUNT' + # Typically, something like: "mysql: [Warning] Using a password on the command line interface can be insecure.\nROW_COUNT()\n3\n" + # With warning: "mysql: [Warning] Using a password on the command line interface can be insecure.\nROW_COUNT()\n1743\nLevel\tCode\tMessage\nWarning\t1264\tOut of range value for column 'amount' at row 582\n" + response_msg = response.split("\n") + idx_row_count_inserted = response_msg.index('ROW_COUNT()') + 1 + row_count_inserted = response_msg[idx_row_count_inserted] + @logger.info "\e[32mImporting table #{table_name}...... Row #{row_count_inserted || 1} of #{qty_to_insert} success\e[0m" + if idx_row_count_inserted < response_msg.size - 1 + warning_msg = response_msg[response_msg.size - 1] + @logger.warn "\e[91m#{warning_msg}\e[0m" end - - return true end - def generate_insert_statement(tables) - - statements = [] - @@logger.info "\e[32mGenerating statements\e[0m" - - tables.each_key do |table_name| - table = tables[table_name] - if !table[:rows].nil? && table[:rows].size > 0 - columns_names = table[:col_names].join(",").gsub(/'/,'') + true + end - rows = [] - table[:rows].each do |row| - rows << row.map{|value| value.is_a?(Symbol) ? value.to_s : "'#{value.to_s.gsub(/['"]/, "'" => "\\'", '"' => '\\"')}'" }.join(",") + def generate_insert_statement(tables) + statements = [] + @logger.info "\e[32mGenerating statements\e[0m" + + tables.each_key do |table_name| + table = tables[table_name] + next unless !table[:rows].nil? && !table[:rows].empty? 
+ + columns_names = table[:col_names].join(',').gsub(/'/, '') + + rows = [] + table[:rows].each do |row| + rows << row.map do |value| + if value.is_a?(Symbol) + value.to_s + elsif value.is_a?(Blob) + value.value + else + escaped_value = value.to_s.gsub(/['"]/, "'" => "\\'", '"' => '\\"') + .gsub('\N{LINE FEED}', "\n") + .gsub('\N{VERTICAL LINE}', '|') + "'#{escaped_value}'" end - - value_data = rows.map{|row| "(#{row})" }.join(",") - - statements << {:query => get_insert_statement(table_name,columns_names,value_data, rows.size), - :qty_to_insert => rows.size, :table_name => table_name, :table_data => table} - - end - + end.join(',') end - statements + # Break the insert statement into small chunks to avoid timeouts + rows.each_slice(1000).each do |subset_of_rows| + value_data = subset_of_rows.map { |row| "(#{row})" }.join(',') + statements << { query: build_insert_statement(table_name, columns_names, value_data, subset_of_rows.size), + qty_to_insert: subset_of_rows.size, table_name: table_name, table_data: table } + end end - private + statements + end - def get_insert_statement(table_name, columns_names, values, rows_qty) - return "INSERT INTO #{table_name} ( #{columns_names} ) VALUES #{values}; #{rows_qty == 1 ? LAST_INSERTED_ID : ROWS_UPDATED}" - end + private + def build_insert_statement(table_name, columns_names, values, rows_qty) + "INSERT INTO #{table_name} ( #{columns_names} ) VALUES #{values}; #{rows_qty == 1 ? 
LAST_INSERTED_ID : ROWS_UPDATED}" end - end - -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/diagnostic_file.rb b/kpm/lib/kpm/diagnostic_file.rb index 594e0161..c490446f 100644 --- a/kpm/lib/kpm/diagnostic_file.rb +++ b/kpm/lib/kpm/diagnostic_file.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require 'yaml' require 'tmpdir' require 'zip' @@ -5,195 +7,173 @@ require 'fileutils' require 'date' - module KPM + class DiagnosticFile + # Temporary directory + TMP_DIR_PREFIX = 'killbill-diagnostics-' + TMP_DIR = Dir.mktmpdir(TMP_DIR_PREFIX) + TMP_LOGS_DIR = TMP_DIR + File::Separator + 'logs' + + TENANT_FILE = 'tenant_config.data' + SYSTEM_FILE = 'system_configuration.data' + ACCOUNT_FILE = 'account.data' + + TODAY_DATE = Date.today.strftime('%m-%d-%y') + ZIP_FILE = 'killbill-diagnostics-' + TODAY_DATE + '.zip' + ZIP_LOG_FILE = 'logs.zip' + + def initialize(config_file = nil, killbill_api_credentials = nil, killbill_credentials = nil, killbill_url = nil, + database_name = nil, database_credentials = nil, database_host = nil, database_port = nil, kaui_web_path = nil, + killbill_web_path = nil, bundles_dir = nil, logger = nil) + @killbill_api_credentials = killbill_api_credentials + @killbill_credentials = killbill_credentials + @killbill_url = killbill_url + @database_name = database_name + @database_credentials = database_credentials + @database_host = database_host + @database_port = database_port + @config_file = config_file + @kaui_web_path = kaui_web_path + @killbill_web_path = killbill_web_path + @logger = logger + @original_logger_level = logger.level + @catalina_base = nil + @bundles_dir = bundles_dir + end - class DiagnosticFile - - # Temporary directory - TMP_DIR_PREFIX = 'killbill-diagnostics-' - TMP_DIR = Dir.mktmpdir(TMP_DIR_PREFIX) - TMP_LOGS_DIR = TMP_DIR + File::Separator + 'logs' - - TENANT_FILE = 'tenant_config.data' - SYSTEM_FILE = 'system_configuration.data' - ACCOUNT_FILE = 'account.data' - - TODAY_DATE = 
Date.today.strftime('%m-%d-%y') - ZIP_FILE = 'killbill-diagnostics-' + TODAY_DATE + '.zip' - ZIP_LOG_FILE = 'logs.zip' - - def initialize(config_file = nil, killbill_api_credentials = nil, killbill_credentials = nil, killbill_url = nil, - database_name = nil, database_credentials = nil, database_host = nil, database_port = nil, kaui_web_path = nil, - killbill_web_path = nil, bundles_dir = nil, logger = nil) - @killbill_api_credentials = killbill_api_credentials - @killbill_credentials = killbill_credentials - @killbill_url = killbill_url - @database_name = database_name - @database_credentials = database_credentials - @database_host = database_host - @database_port = database_port - @config_file = config_file - @kaui_web_path = kaui_web_path; - @killbill_web_path = killbill_web_path; - @logger = logger - @original_logger_level = logger.level; - @catalina_base = nil - @bundles_dir = bundles_dir - end - - def export_data(account_id = nil, log_dir = nil) - set_config(@config_file) - - tenant_export_file = get_tenant_config - system_export_file = get_system_config - account_export_file = get_account_data(account_id) unless account_id.nil? - log_files = get_log_files(log_dir) - - if File.exist?(system_export_file) && File.exist?(tenant_export_file) - - - zip_file_name = TMP_DIR + File::Separator + ZIP_FILE - - Zip::File.open(zip_file_name, Zip::File::CREATE) do |zipFile| - - zipFile.add(TENANT_FILE, tenant_export_file) - zipFile.add(SYSTEM_FILE, system_export_file) - zipFile.add(ACCOUNT_FILE, account_export_file) unless account_id.nil? - zipFile.add(ZIP_LOG_FILE, log_files) unless log_files.nil? + def export_data(account_id = nil, log_dir = nil) + self.config = @config_file - end + tenant_export_file = retrieve_tenant_config + system_export_file = retrieve_system_config + account_export_file = retrieve_account_data(account_id) unless account_id.nil? 
+ log_files = retrieve_log_files(log_dir) - @logger.info "\e[32mDiagnostic data exported under #{zip_file_name} \e[0m" + raise Interrupt, 'Account id or configuration file not found' unless File.exist?(system_export_file) && File.exist?(tenant_export_file) - else - raise Interrupt, 'Account id or configuration file not found' - end + zip_file_name = TMP_DIR + File::Separator + ZIP_FILE + Zip::File.open(zip_file_name, Zip::File::CREATE) do |zip_file| + zip_file.add(TENANT_FILE, tenant_export_file) + zip_file.add(SYSTEM_FILE, system_export_file) + zip_file.add(ACCOUNT_FILE, account_export_file) unless account_id.nil? + zip_file.add(ZIP_LOG_FILE, log_files) unless log_files.nil? end - # Private methods - - private + @logger.info "\e[32mDiagnostic data exported under #{zip_file_name} \e[0m" - def get_tenant_config + zip_file_name + end - @logger.info 'Retrieving tenant configuration' - # this suppress the message of where it put the account file, this is to avoid confusion - @logger.level = Logger::WARN + # Private methods - @killbill_api_credentials ||= [get_config('killbill', 'api_key'), get_config('killbill','api_secret')] unless @config_file.nil? - @killbill_credentials ||= [get_config('killbill', 'user'), get_config('killbill','password')] unless @config_file.nil? - @killbill_url ||= 'http://' + get_config('killbill', 'host').to_s + ':' + get_config('killbill','port').to_s unless @config_file.nil? 
+ private - tenant_config = KPM::TenantConfig.new(@killbill_api_credentials, - @killbill_credentials, @killbill_url, @logger) - export_file = tenant_config.export + def retrieve_tenant_config + @logger.info 'Retrieving tenant configuration' + # this suppress the message of where it put the account file, this is to avoid confusion + @logger.level = Logger::WARN - final = TMP_DIR + File::Separator + TENANT_FILE - FileUtils.move(export_file, final) - @logger.level = @original_logger_level + @killbill_api_credentials ||= [retrieve_config('killbill', 'api_key'), retrieve_config('killbill', 'api_secret')] unless @config_file.nil? + @killbill_credentials ||= [retrieve_config('killbill', 'user'), retrieve_config('killbill', 'password')] unless @config_file.nil? + @killbill_url ||= 'http://' + retrieve_config('killbill', 'host').to_s + ':' + retrieve_config('killbill', 'port').to_s unless @config_file.nil? - final - end + tenant_config = KPM::TenantConfig.new(@killbill_api_credentials, + @killbill_credentials, + @killbill_url, + @logger) + export_file = tenant_config.export - def get_system_config + final = TMP_DIR + File::Separator + TENANT_FILE + FileUtils.move(export_file, final) + @logger.level = @original_logger_level - @logger.info 'Retrieving system configuration' - system = KPM::System.new - export_data = system.information(@bundles_dir, true, @config_file, @kaui_web_path, @killbill_web_path) + final + end - get_system_catalina_base(export_data) + def retrieve_system_config + @logger.info 'Retrieving system configuration' + system = KPM::System.new(@logger) + export_data = system.information(@bundles_dir, true, @config_file, @kaui_web_path, @killbill_web_path) - export_file = TMP_DIR + File::SEPARATOR + SYSTEM_FILE - File.open(export_file, 'w') { |io| io.puts export_data } - export_file - end + system_catalina_base(export_data) + export_file = TMP_DIR + File::SEPARATOR + SYSTEM_FILE + File.open(export_file, 'w') { |io| io.puts export_data } + export_file + end - def 
get_account_data(account_id) + def retrieve_account_data(account_id) + @logger.info 'Retrieving account data for id: ' + account_id + # this suppress the message of where it put the account file, this is to avoid confusion + @logger.level = Logger::WARN - @logger.info 'Retrieving account data for id: ' + account_id - # this suppress the message of where it put the account file, this is to avoid confusion - @logger.level = Logger::WARN + account = KPM::Account.new(@config_file, @killbill_api_credentials, @killbill_credentials, + @killbill_url, @database_name, + @database_credentials, @database_host, @database_port, nil, @logger) + export_file = account.export_data(account_id) - account = KPM::Account.new(@config_file, @killbill_api_credentials, @killbill_credentials, - @killbill_url, @database_name, - @database_credentials,@database_host, @database_port, nil, @logger) - export_file = account.export_data(account_id) + final = TMP_DIR + File::Separator + ACCOUNT_FILE + FileUtils.move(export_file, final) + @logger.level = @original_logger_level + final + end - final = TMP_DIR + File::Separator + ACCOUNT_FILE - FileUtils.move(export_file, final) - @logger.level = @original_logger_level - final + def retrieve_log_files(log_dir) + if @catalina_base.nil? && log_dir.nil? + @logger.warn "\e[91;1mUnable to find Tomcat process, logs won't be collected: make sure to run kpm using the same user as the Tomcat process or pass the option --log-dir\e[0m" + return nil end - def get_log_files(log_dir) + @logger.info 'Collecting log files' + log_base = log_dir || (@catalina_base + File::Separator + 'logs') + log_items = Dir.glob(log_base + File::Separator + '*') - @logger.info 'Collecting log files' + zip_file_name = TMP_DIR + File::Separator + ZIP_LOG_FILE - if @catalina_base.nil? && log_dir.nil? - @logger.warn 'Unable to find Tomcat process, make sure to run kpm using the same user as the Tomcat process.' 
- return nil + Zip::File.open(zip_file_name, Zip::File::CREATE) do |zip_file| + log_items.each do |file| + name = file.split('/').last + zip_file.add(name, file) end - - log_base = log_dir || (@catalina_base + File::Separator + 'logs') - log_items = Dir.glob(log_base + File::Separator + '*') - - zip_file_name = TMP_DIR + File::Separator + ZIP_LOG_FILE - - Zip::File.open(zip_file_name, Zip::File::CREATE) do |zipFile| - - log_items.each do |file| - name = file.split('/').last - zipFile.add(name, file) - end - - end - - zip_file_name end - # Helpers - - def get_system_catalina_base(export_data) - @catalina_base = nil - system_json = JSON.parse(export_data) + zip_file_name + end - return if system_json['java_system_information']['catalina.base'].nil? + # Helpers - @catalina_base = system_json['java_system_information']['catalina.base']['value'] + def system_catalina_base(export_data) + @catalina_base = nil + system_json = JSON.parse(export_data) - end + return if system_json['java_system_information']['catalina.base'].nil? - # Utils + @catalina_base = system_json['java_system_information']['catalina.base']['value'] + end - def get_config(parent, child) - item = nil; + # Utils - if not @config.nil? + def retrieve_config(parent, child) + item = nil - config_parent = @config[parent] + unless @config.nil? - if not config_parent.nil? - item =config_parent[child] - end + config_parent = @config[parent] - end + item = config_parent[child] unless config_parent.nil? - item end - def set_config(config_file = nil) - @config = nil + item + end - if not config_file.nil? - if not Dir[config_file][0].nil? - @config = YAML::load_file(config_file) - end - end + def config=(config_file = nil) + @config = nil - end + return if config_file.nil? + @config = YAML.load_file(config_file) unless Dir[config_file][0].nil? 
end -end \ No newline at end of file + end +end diff --git a/kpm/lib/kpm/formatter.rb b/kpm/lib/kpm/formatter.rb index 18997730..293f9621 100644 --- a/kpm/lib/kpm/formatter.rb +++ b/kpm/lib/kpm/formatter.rb @@ -1,22 +1,18 @@ +# frozen_string_literal: true + # Extend String to be able to instantiate a object based on its classname class String def to_class - self.split('::').inject(Kernel) do |mod, class_name| + split('::').inject(Kernel) do |mod, class_name| mod.const_get(class_name) end end end module KPM - class Formatter - - def initialize - end - # Used for normal types where to_s is enough class DefaultFormatter - def initialize(label, input) @label = label @input = input @@ -37,7 +33,6 @@ def label # Used for the version map class VersionFormatter - def initialize(label, versions) @label = label @versions = versions @@ -48,79 +43,112 @@ def size end def to_s - @versions.map { |q| sha1=format_sha(q[:sha1]); disabled=""; disabled="(x)" if q[:is_disabled]; default=""; default="(*)" if q[:is_default]; "#{q[:version]}#{sha1}#{default}#{disabled}" }.join(", ") + @versions.map do |q| + sha1 = format_sha(q[:sha1]) + disabled = '' + disabled = '(x)' if q[:is_disabled] + default = '' + default = '(*)' if q[:is_default] + "#{q[:version]}#{sha1}#{default}#{disabled}" + end.join(', ') end def label "#{@label.to_s.upcase.gsub(/_/, ' ')} sha1=[], def=(*), del=(x)" end + private + def format_sha(sha) - return "[???]" if sha.nil? + return '[???]' if sha.nil? + "[#{sha[0..5]}..]" end end - def format(data, labels = nil) - if data.nil? || data.size == 0 - return - end + puts format_only(data, labels) + end - if labels.nil? + private + + def format_only(data, labels = nil) + return if data.nil? || data.empty? + if labels.nil? 
# What we want to output - labels = [{:label => :plugin_name}, - {:label => :plugin_key}, - {:label => :type}, - {:label => :group_id}, - {:label => :artifact_id}, - {:label => :packaging}, - {:label => :versions, :formatter => VersionFormatter.name}] + labels = [{ label: :plugin_name }, + { label: :plugin_key }, + { label: :type }, + { label: :group_id }, + { label: :artifact_id }, + { label: :packaging }, + { label: :versions, formatter: VersionFormatter.name }] end # Compute label to print along with max size for each label - labels_format_argument = [] + labels_format_argument = compute_labels(data, labels) + + border = compute_border(labels) + + format_string = compute_format(labels) + + formatted = "\n#{border}\n" + formatted += Kernel.format("#{format_string}\n", *labels_format_argument) + formatted += "#{border}\n" + data.keys.each do |key| v = data[key] - labels.each do |e| - # sanitize entry at the same time - v[e[:label]] = v[e[:label]] || "???" + arguments = [] + labels.inject(arguments) do |res, e| formatter = e[:formatter].nil? ? DefaultFormatter.new(e[:label], v[e[:label]]) : e[:formatter].to_class.new(e[:label], v[e[:label]]) - prev_size = e.key?(:size) ? e[:size] : formatter.label.size - cur_size = formatter.size - e[:size] = prev_size < cur_size ? 
cur_size : prev_size - labels_format_argument << formatter.label + res << formatter.to_s end + formatted += Kernel.format("#{format_string}\n", *arguments) end + formatted += "#{border}\n\n" + formatted + end + def compute_format(labels) + format = '|' + labels.inject(format) { |res, lbl| "#{res} %#{lbl[:size]}s |" } + end - border = "_" - border = (0...labels.size).inject(border) { |res, i| res="#{res}_"; res } - border = labels.inject(border) { |res, lbl| (0...lbl[:size] + 2).each { |s| res="#{res}_" }; res } - format = "|" - format = labels.inject(format) { |res, lbl| res="#{res} %#{lbl[:size]}s |"; res } - - + def compute_border(labels) + border = '_' + border = (0...labels.size).inject(border) { |res, _i| "#{res}_" } + labels.inject(border) do |res, lbl| + (0...lbl[:size] + 2).each { |_s| res = "#{res}_" } + res + end + end - puts "\n#{border}\n" - puts "#{format}\n" % labels_format_argument - puts "#{border}\n" + # Return labels for each row and update the labels hash with the size of each column + def compute_labels(data, labels) + seen_labels = Set.new + labels_format_argument = [] data.keys.each do |key| v = data[key] + labels.each do |e| + # sanitize entry at the same time + v[e[:label]] = v[e[:label]] || '???' - arguments = [] - labels.inject(arguments) do |res, e| + # Always recompute the size formatter = e[:formatter].nil? ? DefaultFormatter.new(e[:label], v[e[:label]]) : e[:formatter].to_class.new(e[:label], v[e[:label]]) - res << formatter.to_s + prev_size = e.key?(:size) ? e[:size] : formatter.label.size + cur_size = formatter.size + e[:size] = prev_size < cur_size ? 
cur_size : prev_size + + # Labels should be unique though + labels_format_argument << formatter.label unless seen_labels.include?(e[:label]) + seen_labels << e[:label] end - puts "#{format}\n" % arguments end - puts "#{border}\n\n" - + labels_format_argument end end end diff --git a/kpm/lib/kpm/inspector.rb b/kpm/lib/kpm/inspector.rb index 0fa42af4..c467173b 100644 --- a/kpm/lib/kpm/inspector.rb +++ b/kpm/lib/kpm/inspector.rb @@ -1,15 +1,14 @@ -module KPM +# frozen_string_literal: true +module KPM class Inspector - - def initialize - end + def initialize; end def inspect(bundles_dir) bundles_dir = Pathname.new(bundles_dir || KPM::BaseInstaller::DEFAULT_BUNDLES_DIR).expand_path - plugins= bundles_dir.join('plugins') - ruby_plugins_path=bundles_dir.join('plugins/ruby') - java_plugins_path=bundles_dir.join('plugins/java') + plugins = bundles_dir.join('plugins') + ruby_plugins_path = bundles_dir.join('plugins/ruby') + java_plugins_path = bundles_dir.join('plugins/java') all_plugins = {} build_plugins_for_type(ruby_plugins_path, 'ruby', all_plugins) @@ -27,11 +26,9 @@ def format(all_plugins) formatter.format(all_plugins) end - private def add_sha1_info(bundles_dir, all_plugins) - sha1_filename = KPM::BaseInstaller::SHA1_FILENAME sha1_file = "#{bundles_dir}/#{sha1_filename}" sha1_checker = Sha1Checker.from_file(sha1_file) @@ -43,14 +40,12 @@ def add_sha1_info(bundles_dir, all_plugins) coord, sha1 = e coordinate_map = KPM::Coordinates.get_coordinate_map(coord) - if coordinate_map[:group_id] == cur[:group_id] && - coordinate_map[:artifact_id] == cur[:artifact_id] && - coordinate_map[:packaging] == cur[:packaging] - - found_version = cur[:versions].select { |v| v[:version] == coordinate_map[:version] }[0] - found_version[:sha1] = sha1 if found_version - end + next unless coordinate_map[:group_id] == cur[:group_id] && + coordinate_map[:artifact_id] == cur[:artifact_id] && + coordinate_map[:packaging] == cur[:packaging] + found_version = cur[:versions].select { |v| 
v[:version] == coordinate_map[:version] }[0] + found_version[:sha1] = sha1 if found_version end end end @@ -67,31 +62,27 @@ def add_plugin_identifier_info(plugins, all_plugins) end end - def build_plugins_for_type(plugins_path, type, res) - if !File.exists?(plugins_path) - return [] - end - get_entries(plugins_path).inject(res) do |out, e| + return [] unless File.exist?(plugins_path) + + get_entries(plugins_path).each_with_object(res) do |e, out| plugin_map = build_plugin_map(e, plugins_path.join(e), type) out[e] = plugin_map - out end end def build_plugin_map(plugin_name, plugin_path, type) - - plugin_map = {:plugin_name => plugin_name, :plugin_path => plugin_path.to_s, :type => type} + plugin_map = { plugin_name: plugin_name, plugin_path: plugin_path.to_s, type: type } entries = get_entries(plugin_path) - set_default = entries.select { |e| e == "SET_DEFAULT" }[0] + set_default = entries.select { |e| e == 'SET_DEFAULT' }[0] default_version = File.basename(File.readlink(plugin_path.join(set_default))) if set_default - versions = entries.select do |e| - e != "SET_DEFAULT" - end.inject([]) do |out, e| - is_disabled = File.exists?(plugin_path.join(e).join('tmp').join('disabled.txt')) - out << {:version => e, :is_default => default_version == e, :is_disabled => is_disabled, :sha1 => nil}; - out + non_default = entries.reject do |e| + e == 'SET_DEFAULT' + end + versions = non_default.each_with_object([]) do |e, out| + is_disabled = File.exist?(plugin_path.join(e).join('tmp').join('disabled.txt')) + out << { version: e, is_default: default_version == e, is_disabled: is_disabled, sha1: nil } end versions.sort! { |a, b| a[:version] <=> b[:version] } @@ -103,6 +94,5 @@ def build_plugin_map(plugin_name, plugin_path, type) def get_entries(path) Dir.entries(path).select { |entry| entry != '.' && entry != '..' 
&& File.directory?(File.join(path, entry)) } end - end end diff --git a/kpm/lib/kpm/installer.rb b/kpm/lib/kpm/installer.rb index 15ae3c4e..77f7a7ce 100644 --- a/kpm/lib/kpm/installer.rb +++ b/kpm/lib/kpm/installer.rb @@ -1,40 +1,36 @@ +# frozen_string_literal: true + require 'logger' require 'pathname' require 'yaml' module KPM class Installer < BaseInstaller - - def self.from_file(config_path=nil, logger=nil) - if config_path.nil? - # Install Kill Bill, Kaui and the KPM plugin by default - config = build_default_config - else - config = YAML::load_file(config_path) - end + def self.from_file(config_path = nil, logger = nil) + config = if config_path.nil? + # Install Kill Bill, Kaui and the KPM plugin by default + build_default_config + else + YAML.load_file(config_path) + end Installer.new(config, logger) end - def self.build_default_config(all_kb_versions=nil) + def self.build_default_config(all_kb_versions = nil) latest_stable_version = get_kb_latest_stable_version(all_kb_versions) { - 'killbill' => { - 'version' => latest_stable_version.to_s, - 'plugins' => { - 'ruby' => [ - {'name' => 'kpm'} - ] - } - }, - 'kaui' => { - # Note: we assume no unstable version of Kaui is published today - 'version' => 'LATEST' - } + 'killbill' => { + 'version' => latest_stable_version.to_s + }, + 'kaui' => { + # Note: we assume no unstable version of Kaui is published today + 'version' => 'LATEST' + } } end - def self.get_kb_latest_stable_version(all_kb_versions=nil) + def self.get_kb_latest_stable_version(all_kb_versions = nil) all_kb_versions ||= KillbillServerArtifact.versions(KillbillServerArtifact::KILLBILL_ARTIFACT_ID, KillbillServerArtifact::KILLBILL_PACKAGING, KillbillServerArtifact::KILLBILL_CLASSIFIER, @@ -42,10 +38,14 @@ def self.get_kb_latest_stable_version(all_kb_versions=nil) true).to_a latest_stable_version = Gem::Version.new('0.0.0') all_kb_versions.each do |kb_version| - version = Gem::Version.new(kb_version) rescue nil + version = begin + 
Gem::Version.new(kb_version) + rescue StandardError + nil + end next if version.nil? - major, minor, patch, pre = version.segments + _major, minor, _patch, pre = version.segments next if !pre.nil? || minor.nil? || minor.to_i.odd? latest_stable_version = version if version > latest_stable_version @@ -54,7 +54,7 @@ def self.get_kb_latest_stable_version(all_kb_versions=nil) latest_stable_version.to_s end - def initialize(raw_config, logger=nil) + def initialize(raw_config, logger = nil) @config = raw_config['killbill'] @kaui_config = raw_config['kaui'] @@ -65,14 +65,26 @@ def initialize(raw_config, logger=nil) logger.level = Logger::INFO end - nexus_config = !@config.nil? ? @config['nexus'] : (!@kaui_config.nil? ? @kaui_config['nexus'] : nil) + nexus_config = if !@config.nil? + @config['nexus'] + elsif !@kaui_config.nil? + @kaui_config['nexus'] + else + nil + end nexus_ssl_verify = !nexus_config.nil? ? nexus_config['ssl_verify'] : true super(logger, nexus_config, nexus_ssl_verify) end - def install(force_download=false, verify_sha1=true) - bundles_dir = !@config.nil? ? @config['plugins_dir'] : (!@kaui_config.nil? ? @kaui_config['plugins_dir'] : nil) + def install(force_download = false, verify_sha1 = true) + bundles_dir = if !@config.nil? + @config['plugins_dir'] + elsif !@kaui_config.nil? + @kaui_config['plugins_dir'] + else + nil + end bundles_dir ||= DEFAULT_BUNDLES_DIR help = nil @@ -80,9 +92,7 @@ def install(force_download=false, verify_sha1=true) help = install_tomcat if @config['webapp_path'].nil? 
install_killbill_server(@config['group_id'], @config['artifact_id'], @config['packaging'], @config['classifier'], @config['version'], @config['webapp_path'], bundles_dir, force_download, verify_sha1) install_plugins(bundles_dir, @config['version'], force_download, verify_sha1) - unless @config['default_bundles'] == false - install_default_bundles(bundles_dir, @config['default_bundles_version'], @config['version'], force_download, verify_sha1) - end + install_default_bundles(bundles_dir, @config['default_bundles_version'], @config['version'], force_download, verify_sha1) unless @config['default_bundles'] == false clean_up_descriptors(bundles_dir) end @@ -95,13 +105,13 @@ def install(force_download=false, verify_sha1=true) install_kaui(@kaui_config['group_id'], @kaui_config['artifact_id'], @kaui_config['packaging'], @kaui_config['classifier'], @kaui_config['version'], @kaui_config['webapp_path'], bundles_dir, force_download, verify_sha1) end - @trace_logger.add('help',help) + @trace_logger.add('help', nil, help) @trace_logger.to_json end private - def install_tomcat(dir=Dir.pwd) + def install_tomcat(dir = Dir.pwd) # Download and unpack Tomcat manager = KPM::TomcatManager.new(dir, @logger) manager.download @@ -109,9 +119,7 @@ def install_tomcat(dir=Dir.pwd) # Update main config root_war_path = manager.setup @config['webapp_path'] = root_war_path - unless @kaui_config.nil? - @kaui_config['webapp_path'] = Pathname.new(File.dirname(root_war_path)).join('kaui.war').to_s - end + @kaui_config['webapp_path'] = Pathname.new(File.dirname(root_war_path)).join('kaui.war').to_s unless @kaui_config.nil? # Help message manager.help @@ -123,7 +131,7 @@ def install_plugins(bundles_dir, raw_kb_version, force_download, verify_sha1) end def install_java_plugins(bundles_dir, raw_kb_version, force_download, verify_sha1) - return if @config['plugins'].nil? or @config['plugins']['java'].nil? + return if @config['plugins'].nil? || @config['plugins']['java'].nil? 
infos = [] @config['plugins']['java'].each do |plugin| @@ -134,13 +142,13 @@ def install_java_plugins(bundles_dir, raw_kb_version, force_download, verify_sha end def install_ruby_plugins(bundles_dir, raw_kb_version, force_download, verify_sha1) - return if @config['plugins'].nil? or @config['plugins']['ruby'].nil? + return if @config['plugins'].nil? || @config['plugins']['ruby'].nil? - verify_jruby_jar=true + verify_jruby_jar = true infos = [] @config['plugins']['ruby'].each do |plugin| infos << install_plugin(plugin['name'], raw_kb_version, plugin['group_id'], plugin['artifact_id'], plugin['packaging'], plugin['classifier'], plugin['version'], bundles_dir, 'ruby', force_download, verify_sha1, verify_jruby_jar) - verify_jruby_jar=false + verify_jruby_jar = false end infos @@ -161,11 +169,11 @@ def clean_up_plugin_identifiers(bundles_dir) plugin_identifiers = plugins_manager.read_plugin_identifiers removed_identifiers = [] plugin_identifiers.each do |plugin_key, plugin| - if !installed_plugins.has_key?(plugin['plugin_name']) - _, plugin_entry = plugins_manager.get_identifier_key_and_entry(plugin_key) - plugins_manager.remove_plugin_identifier_key(plugin_key) - removed_identifiers << plugin_entry - end + next if installed_plugins.key?(plugin['plugin_name']) + + _, plugin_entry = plugins_manager.get_identifier_key_and_entry(plugin_key) + plugins_manager.remove_plugin_identifier_key(plugin_key) + removed_identifiers << plugin_entry end removed_identifiers @@ -182,6 +190,5 @@ def clean_up_sha1s(removed_plugins, plugins_dir) sha1checker.remove_entry!(coordinates) end end - end end diff --git a/kpm/lib/kpm/kaui_artifact.rb b/kpm/lib/kpm/kaui_artifact.rb index d1c3dc2c..e1132eda 100644 --- a/kpm/lib/kpm/kaui_artifact.rb +++ b/kpm/lib/kpm/kaui_artifact.rb @@ -1,12 +1,13 @@ +# frozen_string_literal: true + require 'rexml/document' require 'set' module KPM class KauiArtifact < BaseArtifact class << self - def versions(overrides={}, ssl_verify=true) - - coordinate_map = 
{:group_id => KPM::BaseArtifact::KAUI_GROUP_ID, :artifact_id => KPM::BaseArtifact::KAUI_ARTIFACT_ID, :packaging => KPM::BaseArtifact::KAUI_PACKAGING, :classifier => KPM::BaseArtifact::KAUI_CLASSIFIER} + def versions(overrides = {}, ssl_verify = true) + coordinate_map = { group_id: KPM::BaseArtifact::KAUI_GROUP_ID, artifact_id: KPM::BaseArtifact::KAUI_ARTIFACT_ID, packaging: KPM::BaseArtifact::KAUI_PACKAGING, classifier: KPM::BaseArtifact::KAUI_CLASSIFIER } coordinates = KPM::Coordinates.build_coordinates(coordinate_map) response = REXML::Document.new nexus_remote(overrides, ssl_verify).search_for_artifacts(coordinates) diff --git a/kpm/lib/kpm/killbill_plugin_artifact.rb b/kpm/lib/kpm/killbill_plugin_artifact.rb index 145dcb8d..536544a9 100644 --- a/kpm/lib/kpm/killbill_plugin_artifact.rb +++ b/kpm/lib/kpm/killbill_plugin_artifact.rb @@ -1,23 +1,25 @@ +# frozen_string_literal: true + require 'rexml/document' require 'set' module KPM class KillbillPluginArtifact < BaseArtifact class << self - def pull(logger, group_id, artifact_id, packaging='jar', classifier=nil, version='LATEST', plugin_name=nil, destination_path=nil, sha1_file=nil, force_download=false, verify_sha1=true, overrides={}, ssl_verify=true) - coordinate_map = {:group_id => group_id, :artifact_id => artifact_id, :packaging => packaging, :classifier => classifier, :version => version} - pull_and_put_in_place(logger, coordinate_map, plugin_name, destination_path, is_ruby_plugin_and_should_skip_top_dir(group_id, artifact_id), sha1_file, force_download, verify_sha1, overrides, ssl_verify) + def pull(logger, group_id, artifact_id, packaging = 'jar', classifier = nil, version = 'LATEST', plugin_name = nil, destination_path = nil, sha1_file = nil, force_download = false, verify_sha1 = true, overrides = {}, ssl_verify = true) + coordinate_map = { group_id: group_id, artifact_id: artifact_id, packaging: packaging, classifier: classifier, version: version } + pull_and_put_in_place(logger, coordinate_map, 
plugin_name, destination_path, ruby_plugin_and_should_skip_top_dir?(group_id, artifact_id), sha1_file, force_download, verify_sha1, overrides, ssl_verify) end - def versions(overrides={}, ssl_verify=true) - plugins = {:java => {}, :ruby => {}} + def versions(overrides = {}, ssl_verify = true) + plugins = { java: {}, ruby: {} } nexus = nexus_remote(overrides, ssl_verify) [[:java, KPM::BaseArtifact::KILLBILL_JAVA_PLUGIN_GROUP_ID], [:ruby, KPM::BaseArtifact::KILLBILL_RUBY_PLUGIN_GROUP_ID]].each do |type_and_group_id| response = REXML::Document.new nexus.search_for_artifacts(type_and_group_id[1]) response.elements.each('searchNGResponse/data/artifact') do |element| - artifact_id = element.elements['artifactId'].text + artifact_id = element.elements['artifactId'].text plugins[type_and_group_id[0]][artifact_id] ||= SortedSet.new plugins[type_and_group_id[0]][artifact_id] << element.elements['version'].text end @@ -27,9 +29,10 @@ def versions(overrides={}, ssl_verify=true) end protected + # Magic methods... 
- def is_ruby_plugin_and_should_skip_top_dir(group_id, artifact_id) + def ruby_plugin_and_should_skip_top_dir?(group_id, artifact_id) # The second check is for custom ruby plugins group_id == KPM::BaseArtifact::KILLBILL_RUBY_PLUGIN_GROUP_ID || artifact_id.include?('plugin') end diff --git a/kpm/lib/kpm/killbill_server_artifact.rb b/kpm/lib/kpm/killbill_server_artifact.rb index 77d124e5..6624341b 100644 --- a/kpm/lib/kpm/killbill_server_artifact.rb +++ b/kpm/lib/kpm/killbill_server_artifact.rb @@ -1,11 +1,13 @@ +# frozen_string_literal: true + require 'rexml/document' require 'set' module KPM class KillbillServerArtifact < BaseArtifact class << self - def versions(artifact_id, packaging=KPM::BaseArtifact::KILLBILL_PACKAGING, classifier=KPM::BaseArtifact::KILLBILL_CLASSIFIER, overrides={}, ssl_verify=true) - coordinate_map = {:group_id => KPM::BaseArtifact::KILLBILL_GROUP_ID, :artifact_id => artifact_id, :packaging => packaging, :classifier => classifier} + def versions(artifact_id, packaging = KPM::BaseArtifact::KILLBILL_PACKAGING, classifier = KPM::BaseArtifact::KILLBILL_CLASSIFIER, overrides = {}, ssl_verify = true) + coordinate_map = { group_id: KPM::BaseArtifact::KILLBILL_GROUP_ID, artifact_id: artifact_id, packaging: packaging, classifier: classifier } coordinates = KPM::Coordinates.build_coordinates(coordinate_map) response = REXML::Document.new nexus_remote(overrides, ssl_verify).search_for_artifacts(coordinates) versions = SortedSet.new @@ -13,10 +15,13 @@ def versions(artifact_id, packaging=KPM::BaseArtifact::KILLBILL_PACKAGING, class versions end - def info(version='LATEST', overrides={}, ssl_verify=true) + def info(version = 'LATEST', sha1_file = nil, force_download = false, verify_sha1 = true, overrides = {}, ssl_verify = true) logger = Logger.new(STDOUT) logger.level = Logger::ERROR + # Initialize as early as possible (used in rescue block below) + sha1_checker = sha1_file ? 
Sha1Checker.from_file(sha1_file) : nil + version = KPM::Installer.get_kb_latest_stable_version if version == 'LATEST' versions = {} @@ -29,9 +34,9 @@ def info(version='LATEST', overrides={}, ssl_verify=true) nil, version, dir, - nil, - false, - true, + sha1_file, + force_download, + verify_sha1, overrides, ssl_verify) @@ -51,19 +56,28 @@ def info(version='LATEST', overrides={}, ssl_verify=true) nil, oss_parent_version, dir, - nil, - false, - true, + sha1_file, + force_download, + verify_sha1, overrides, ssl_verify) pom = REXML::Document.new(File.new(oss_pom_info[:file_path])) properties_element = pom.root.elements['properties'] - %w(killbill-api killbill-plugin-api killbill-commons killbill-platform).each do |property| + %w[killbill-api killbill-plugin-api killbill-commons killbill-platform].each do |property| versions[property] = properties_element.elements["#{property}.version"].text end + + sha1_checker.cache_killbill_info(version, versions) if sha1_checker end versions + rescue StandardError => e + # Network down? Hopefully, we have something in the cache + cached_version = sha1_checker ? 
sha1_checker.killbill_info(version) : nil + raise e if force_download || !cached_version + + # Use the cache + cached_version end end end diff --git a/kpm/lib/kpm/migrations.rb b/kpm/lib/kpm/migrations.rb index c5ba1645..2231f7c7 100644 --- a/kpm/lib/kpm/migrations.rb +++ b/kpm/lib/kpm/migrations.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require 'base64' require 'json' require 'logger' @@ -6,10 +8,9 @@ module KPM class Migrations - - KILLBILL_MIGRATION_PATH = /src\/main\/resources\/org\/killbill\/billing\/[a-z]+\/migration\/(V[0-9a-zA-Z_]+.sql)/ - JAVA_PLUGIN_MIGRATION_PATH = /src\/main\/resources\/migration\/(V[0-9a-zA-Z_]+.sql)/ - RUBY_PLUGIN_MIGRATION_PATH = /db\/migrate\/([0-9a-zA-Z_]+.rb)/ + KILLBILL_MIGRATION_PATH = %r{src/main/resources/org/killbill/billing/[a-z]+/migration/(V[0-9a-zA-Z_]+.sql)}.freeze + JAVA_PLUGIN_MIGRATION_PATH = %r{src/main/resources/migration/(V[0-9a-zA-Z_]+.sql)}.freeze + RUBY_PLUGIN_MIGRATION_PATH = %r{db/migrate/([0-9a-zA-Z_]+.rb)}.freeze # Go to https://github.com/settings/tokens to generate a token def initialize(from_version, to_version = nil, repository = 'killbill/killbill', oauth_token = nil, logger = Logger.new(STDOUT)) @@ -34,7 +35,7 @@ def migrations end def save(dir = nil) - return nil if migrations.size == 0 + return nil if migrations.empty? 
dir ||= Dir.mktmpdir @logger.debug("Storing migrations to #{dir}") @@ -71,8 +72,8 @@ def for_version(version = @from_version, name_only = false, migrations_to_skip = end migrations << { - :name => migration_name, - :sql => sql + name: migration_name, + sql: sql } end diff --git a/kpm/lib/kpm/nexus_helper/actions.rb b/kpm/lib/kpm/nexus_helper/actions.rb index c4654bc2..7a644e5b 100644 --- a/kpm/lib/kpm/nexus_helper/actions.rb +++ b/kpm/lib/kpm/nexus_helper/actions.rb @@ -1,9 +1,25 @@ +# frozen_string_literal: true + require_relative 'nexus_api_calls_v2' -#require_relative 'nexus_api_calls_v3' +# require_relative 'nexus_api_calls_v3' module KPM module NexusFacade class Actions + DEFAULT_RETRIES = 3 + DEFAULT_CONNECTION_ERRORS = { + EOFError => 'The remote server dropped the connection', + Errno::ECONNREFUSED => 'The remote server refused the connection', + Errno::ECONNRESET => 'The remote server reset the connection', + Timeout::Error => 'The connection to the remote server timed out', + Errno::ETIMEDOUT => 'The connection to the remote server timed out', + SocketError => 'The connection to the remote server could not be established', + OpenSSL::X509::CertificateError => 'The remote server did not accept the provided SSL certificate', + OpenSSL::SSL::SSLError => 'The SSL connection to the remote server could not be established', + Zlib::BufError => 'The remote server replied with an invalid response', + KPM::NexusFacade::UnexpectedStatusCodeException => nil + }.freeze + attr_reader :nexus_api_call def initialize(overrides, ssl_verify, logger) @@ -11,24 +27,47 @@ def initialize(overrides, ssl_verify, logger) overrides[:url] ||= 'https://oss.sonatype.org' overrides[:repository] ||= 'releases' - #this is where the version is verified - #example if - #@nexus_api_call = overrides['version'] == '3' ? 
NexusApiCallsV3.new(overrides, ssl_verify) : NexusApiCallsV2.new(overrides, ssl_verify) + @logger = logger + + # this is where the version is verified + # example if + # @nexus_api_call = overrides['version'] == '3' ? NexusApiCallsV3.new(overrides, ssl_verify) : NexusApiCallsV2.new(overrides, ssl_verify) @nexus_api_call = NexusApiCallsV2.new(overrides, ssl_verify, logger) end - def pull_artifact(coordinates, destination=nil) - nexus_api_call.pull_artifact(coordinates, destination) + def pull_artifact(coordinates, destination = nil) + retry_exceptions("pull_artifact #{coordinates}") { nexus_api_call.pull_artifact(coordinates, destination) } end def get_artifact_info(coordinates) - nexus_api_call.get_artifact_info(coordinates) + retry_exceptions("get_artifact_info #{coordinates}") { nexus_api_call.get_artifact_info(coordinates) } end def search_for_artifacts(coordinates) - nexus_api_call.search_for_artifacts(coordinates) + retry_exceptions("search_for_artifacts #{coordinates}") { nexus_api_call.search_for_artifacts(coordinates) } + end + + private + + def retry_exceptions(tag) + retries = DEFAULT_RETRIES + + begin + yield + rescue *DEFAULT_CONNECTION_ERRORS.keys => e + retries -= 1 + + @logger.warn(format('Transient error during %s, retrying (attempt=%d): %s', tag: tag, attempt: DEFAULT_RETRIES - retries, msg: derived_error_message(DEFAULT_CONNECTION_ERRORS, e))) + retry unless retries.zero? + + raise + end end + def derived_error_message(errors, exception) + key = (errors.keys & exception.class.ancestors).first + (key ? 
errors[key] : nil) || exception.message + end end end end diff --git a/kpm/lib/kpm/nexus_helper/nexus_api_calls_v2.rb b/kpm/lib/kpm/nexus_helper/nexus_api_calls_v2.rb index 95a15a99..21db3b35 100644 --- a/kpm/lib/kpm/nexus_helper/nexus_api_calls_v2.rb +++ b/kpm/lib/kpm/nexus_helper/nexus_api_calls_v2.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require 'net/http' require 'uri' require 'rexml/document' @@ -5,13 +7,12 @@ module KPM module NexusFacade - class UnexpectedStatusCodeException < StandardError def initialize(code) @code = code end - def message + def message "The server responded with a #{@code} status code which is unexpected." end end @@ -34,7 +35,6 @@ class NexusApiCallsV2 OPEN_TIMEOUT_DEFAULT = 60 ERROR_MESSAGE_404 = 'The artifact you requested information for could not be found. Please ensure it exists inside the Nexus.' - ERROR_MESSAGE_503 = 'Could not connect to Nexus. Please ensure the url you are using is reachable.' attr_reader :version attr_reader :configuration @@ -49,14 +49,16 @@ def initialize(configuration, ssl_verify, logger) def search_for_artifacts(coordinates) logger.debug "Entered - Search for artifact, coordinates: #{coordinates}" - response = get_response(coordinates, SEARCH_FOR_ARTIFACT_ENDPOINT, [:g, :a]) + response = get_response(coordinates, SEARCH_FOR_ARTIFACT_ENDPOINT, %i[g a]) case response.code - when '200' - logger.debug "response body: #{response.body}" - return response.body - else - raise UnexpectedStatusCodeException.new(response.code) + when '200' + logger.debug "response body: #{response.body}" + return response.body + when '404' + raise StandardError, ERROR_MESSAGE_404 + else + raise UnexpectedStatusCodeException, response.code end end @@ -65,125 +67,116 @@ def get_artifact_info(coordinates) response = get_response(coordinates, GET_ARTIFACT_INFO_ENDPOINT, nil) case response.code - when '200' - logger.debug "response body: #{response.body}" - return response.body - when '404' - raise 
StandardError.new(ERROR_MESSAGE_404) - when '503' - raise StandardError.new(ERROR_MESSAGE_503) - else - raise UnexpectedStatusCodeException.new(response.code) + when '200' + logger.debug "response body: #{response.body}" + return response.body + when '404' + raise StandardError, ERROR_MESSAGE_404 + else + raise UnexpectedStatusCodeException, response.code end end - def pull_artifact(coordinates ,destination) + def pull_artifact(coordinates, destination) logger.debug "Entered - Pull artifact, coordinates: #{coordinates}" file_name = get_file_name(coordinates) - destination = File.join(File.expand_path(destination || "."), file_name) + destination = File.join(File.expand_path(destination || '.'), file_name) logger.debug "destination: #{destination}" response = get_response(coordinates, PULL_ARTIFACT_ENDPOINT, nil) case response.code - when '301', '307' - location = response['Location'].gsub!(configuration[:url],'') - logger.debug 'fetching artifact' - file_response = get_response(nil,location, nil) - - File.open(destination, "wb") do |io| - io.write(file_response.body) - end - when 404 - raise StandardError.new(ERROR_MESSAGE_404) - else - raise UnexpectedStatusCodeException.new(response.code) + when '301', '307' + location = response['Location'].gsub!(configuration[:url], '') + logger.debug 'fetching artifact' + file_response = get_response(nil, location, nil) + + File.open(destination, 'wb') do |io| + io.write(file_response.body) + end + when 404 + raise StandardError, ERROR_MESSAGE_404 + else + raise UnexpectedStatusCodeException, response.code end { - :file_name => file_name, - :file_path => File.expand_path(destination), - :version => version, - :size => File.size(File.expand_path(destination)) + file_name: file_name, + file_path: File.expand_path(destination), + version: version, + size: File.size(File.expand_path(destination)) } end private - def parse_coordinates(coordinates) - if coordinates.nil? 
- raise ArtifactMalformedException - end + def parse_coordinates(coordinates) + raise ArtifactMalformedException if coordinates.nil? - split_coordinates = coordinates.split(":") - if (split_coordinates.size == 0 or split_coordinates.size > 5) - raise ArtifactMalformedException - end + split_coordinates = coordinates.split(':') + raise ArtifactMalformedException if split_coordinates.empty? || (split_coordinates.size > 5) - artifact = Hash.new + artifact = {} - artifact[:group_id] = split_coordinates[0] - artifact[:artifact_id] = split_coordinates[1] - artifact[:extension] = split_coordinates.size > 3 ? split_coordinates[2] : "jar" - artifact[:classifier] = split_coordinates.size > 4 ? split_coordinates[3] : nil - artifact[:version] = split_coordinates[-1] + artifact[:group_id] = split_coordinates[0] + artifact[:artifact_id] = split_coordinates[1] + artifact[:extension] = split_coordinates.size > 3 ? split_coordinates[2] : 'jar' + artifact[:classifier] = split_coordinates.size > 4 ? split_coordinates[3] : nil + artifact[:version] = split_coordinates[-1] - artifact[:version].upcase! if version == "latest" + artifact[:version].upcase! if version == 'latest' - return artifact - end + artifact + end - def get_file_name(coordinates) - artifact = parse_coordinates(coordinates) + def get_file_name(coordinates) + artifact = parse_coordinates(coordinates) - if artifact[:version].casecmp("latest") - artifact[:version] = REXML::Document.new(get_artifact_info(coordinates)).elements["//version"].text - end + artifact[:version] = REXML::Document.new(get_artifact_info(coordinates)).elements['//version'].text if artifact[:version].casecmp('latest') - if artifact[:classifier].nil? - "#{artifact[:artifact_id]}-#{artifact[:version]}.#{artifact[:extension]}" - else - "#{artifact[:artifact_id]}-#{artifact[:version]}-#{artifact[:classifier]}.#{artifact[:extension]}" - end + if artifact[:classifier].nil? 
+ "#{artifact[:artifact_id]}-#{artifact[:version]}.#{artifact[:extension]}" + else + "#{artifact[:artifact_id]}-#{artifact[:version]}-#{artifact[:classifier]}.#{artifact[:extension]}" end + end - def get_query_params(coordinates, what_parameters = nil) - artifact = parse_coordinates(coordinates) - @version = artifact[:version].to_s.upcase - - query = {:g => artifact[:group_id], :a => artifact[:artifact_id], :e => artifact[:extension], :v => version, :r => configuration[:repository]} - query.merge!({:c => artifact[:classifier]}) unless artifact[:classifier].nil? + def build_query_params(coordinates, what_parameters = nil) + artifact = parse_coordinates(coordinates) + @version = artifact[:version].to_s.upcase - params = what_parameters.nil? ? query : Hash.new - what_parameters.each {|key| params[key] = query[key] unless query[key].nil? } unless what_parameters.nil? + query = { g: artifact[:group_id], a: artifact[:artifact_id], e: artifact[:extension], v: version, r: configuration[:repository] } + query.merge!(c: artifact[:classifier]) unless artifact[:classifier].nil? - params.map{|key,value| "#{key}=#{value}"}.join('&') - end + params = what_parameters.nil? ? query : {} + what_parameters.each { |key| params[key] = query[key] unless query[key].nil? } unless what_parameters.nil? - def get_response(coordinates, endpoint, what_parameters) - http = get_http - query_params = get_query_params(coordinates, what_parameters) unless coordinates.nil? - endpoint = get_endpoint_with_params(endpoint, query_params) unless coordinates.nil? - request = Net::HTTP::Get.new(endpoint) + params.map { |key, value| "#{key}=#{value}" }.join('&') + end - logger.debug "request endpoint: #{endpoint}" + def get_response(coordinates, endpoint, what_parameters) + http = build_http + query_params = build_query_params(coordinates, what_parameters) unless coordinates.nil? + endpoint = endpoint_with_params(endpoint, query_params) unless coordinates.nil? 
+ request = Net::HTTP::Get.new(endpoint) - response = http.request(request) - response - end + logger.debug "request endpoint: #{endpoint}" - def get_http - uri = URI.parse(configuration[:url]) - http = Net::HTTP.new(uri.host,uri.port) - http.open_timeout = configuration[:open_timeout] || OPEN_TIMEOUT_DEFAULT #seconds - http.read_timeout = configuration[:read_timeout] || READ_TIMEOUT_DEFAULT #seconds - http.use_ssl = (ssl_verify != false) - http.verify_mode = OpenSSL::SSL::VERIFY_NONE unless ssl_verify - http - end + response = http.request(request) + response + end - def get_endpoint_with_params(endpoint,query_params) - "#{endpoint}?#{URI::DEFAULT_PARSER.escape(query_params)}" - end + def build_http + uri = URI.parse(configuration[:url]) + http = Net::HTTP.new(uri.host, uri.port) + http.open_timeout = configuration[:open_timeout] || OPEN_TIMEOUT_DEFAULT # seconds + http.read_timeout = configuration[:read_timeout] || READ_TIMEOUT_DEFAULT # seconds + http.use_ssl = (ssl_verify != false) + http.verify_mode = OpenSSL::SSL::VERIFY_NONE unless ssl_verify + http + end + def endpoint_with_params(endpoint, query_params) + "#{endpoint}?#{URI::DEFAULT_PARSER.escape(query_params)}" + end end end end diff --git a/kpm/lib/kpm/nexus_helper/nexus_facade.rb b/kpm/lib/kpm/nexus_helper/nexus_facade.rb index fb70032f..0e6a9344 100644 --- a/kpm/lib/kpm/nexus_helper/nexus_facade.rb +++ b/kpm/lib/kpm/nexus_helper/nexus_facade.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require_relative 'actions' module KPM module NexusFacade @@ -11,10 +13,10 @@ def logger class RemoteFactory class << self - def create(overrides, ssl_verify=true, logger=nil) - Actions.new(overrides, ssl_verify,logger || NexusFacade.logger) + def create(overrides, ssl_verify = true, logger = nil) + Actions.new(overrides, ssl_verify, logger || NexusFacade.logger) end end end end -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/plugins_directory.rb b/kpm/lib/kpm/plugins_directory.rb index 
c6a59fa5..794139da 100644 --- a/kpm/lib/kpm/plugins_directory.rb +++ b/kpm/lib/kpm/plugins_directory.rb @@ -1,26 +1,31 @@ +# frozen_string_literal: true + require 'open-uri' require 'yaml' module KPM class PluginsDirectory - def self.all(latest=false) + def self.all(latest = false) if latest # Look at GitHub (source of truth) - source = URI.parse('https://raw.githubusercontent.com/killbill/killbill-cloud/master/kpm/lib/kpm/plugins_directory.yml').read + begin + source = URI.parse('https://raw.githubusercontent.com/killbill/killbill-cloud/master/kpm/lib/kpm/plugins_directory.yml').read + rescue StandardError + # Default to built-in version if GitHub isn't accessible + return all(false) + end YAML.load(source) else - source = File.join(File.expand_path(File.dirname(__FILE__)), 'plugins_directory.yml') + source = File.join(__dir__, 'plugins_directory.yml') YAML.load_file(source) end end - - def self.list_plugins(latest=false, kb_version) - all(latest).inject({}) { |out, (key, val)| out[key]=val[:versions][kb_version.to_sym] if val[:versions].key?(kb_version.to_sym) ; out} + def self.list_plugins(latest = false, kb_version) + all(latest).each_with_object({}) { |(key, val), out| out[key] = val[:versions][kb_version.to_sym] if val[:versions].key?(kb_version.to_sym); } end - # Note: this API is used in Docker images (see kpm_generator.rb, careful when changing it!) - def self.lookup(raw_plugin_key, latest=false, raw_kb_version=nil) + def self.lookup(raw_plugin_key, latest = false, raw_kb_version = nil) plugin_key = raw_plugin_key.to_s.downcase plugin = all(latest)[plugin_key.to_sym] return nil if plugin.nil? @@ -36,7 +41,7 @@ def self.lookup(raw_plugin_key, latest=false, raw_kb_version=nil) if raw_kb_version == 'LATEST' version = 'LATEST' else - captures = raw_kb_version.nil? ? [] : raw_kb_version.scan(/(\d+\.\d+)(\.\d)?/) + captures = raw_kb_version.nil? ? [] : raw_kb_version.scan(/(\d+\.\d+)(\.\d+)?/) if captures.empty? || captures.first.nil? 
|| captures.first.first.nil? version = 'LATEST' else diff --git a/kpm/lib/kpm/plugins_directory.yml b/kpm/lib/kpm/plugins_directory.yml index 36cbf3d1..f577fe52 100644 --- a/kpm/lib/kpm/plugins_directory.yml +++ b/kpm/lib/kpm/plugins_directory.yml @@ -2,237 +2,78 @@ :accertify: :type: :java :versions: - :0.14: 0.1.0 - :0.16: 0.2.0 :0.18: 0.3.0 - :require: - - :org.killbill.billing.plugin.accertify.url - - :org.killbill.billing.plugin.accertify.username - - :org.killbill.billing.plugin.accertify.password :adyen: :type: :java :versions: - :0.14: 0.1.0 - :0.15: 0.2.1 - :0.16: 0.3.2 - :0.17: 0.4.10 - :0.18: 0.5.10 - :0.19: 0.6.0 - :require: - - :org.killbill.billing.plugin.adyen.merchantAccount - - :org.killbill.billing.plugin.adyen.username - - :org.killbill.billing.plugin.adyen.password - - :org.killbill.billing.plugin.adyen.paymentUrl + :0.18: 0.5.26 + :0.20: 0.7.0 :analytics: :type: :java :versions: - :0.14: 1.0.3 - :0.15: 2.0.1 - :0.16: 3.0.2 - :0.17: 4.0.5 :0.18: 4.2.5 - :0.19: 5.1.4 - :0.20: 6.0.0 + :0.20: 6.0.1 + :0.22: 7.0.8 :avatax: :type: :java :versions: - :0.14: 0.1.0 - :0.15: 0.2.0 - :0.16: 0.3.0 :0.18: 0.4.1 - :0.19: 0.5.1 - :0.20: 0.6.0 - :require: - - :org.killbill.billing.plugin.avatax.url - - :org.killbill.billing.plugin.avatax.accountNumber - - :org.killbill.billing.plugin.avatax.licenseKey -:braintree_blue: - :type: :ruby - :versions: - :0.14: 0.0.1 - :0.16: 0.2.1 - :0.18: 0.3.1 - :require: - - :merchant_id - - :public_key - - :private_key -:currency: - :type: :ruby - :artifact_id: killbill-currency-plugin - :versions: - :0.16: 2.0.0 - :0.18: 3.0.0 + :0.20: 0.6.1 + :0.22: 0.7.0 :cybersource: :type: :ruby :versions: - :0.14: 1.0.0 - :0.15: 3.3.0 - :0.16: 4.0.12 :0.18: 5.2.7 - :require: - - :login - - :password :dwolla: :type: :java :versions: :0.18: 0.1.0 - :require: - - :org.killbill.billing.plugin.dwolla.baseUrl - - :org.killbill.billing.plugin.dwolla.baseOAuthUrl - - :org.killbill.billing.plugin.dwolla.scopes - - 
:org.killbill.billing.plugin.dwolla.clientId - - :org.killbill.billing.plugin.dwolla.clientSecret - - :org.killbill.billing.plugin.dwolla.accountId :email-notifications: :type: :java :artifact_id: killbill-email-notifications-plugin :versions: - :0.14: 0.1.0 - :0.16: 0.2.0 :0.18: 0.3.1 - :0.19: 0.4.0 - :0.20: 0.5.0 -:firstdata_e4: - :type: :ruby - :artifact_id: firstdata-e4-plugin - :versions: - :0.16: 0.1.0 - :0.18: 0.2.0 - :require: - - :login - - :password + :0.20: 0.5.1 + :0.22: 0.6.1 :forte: :type: :java :versions: - :0.14: 0.1.0 - :0.16: 0.2.0 :0.18: 0.3.0 - :require: - - :org.killbill.billing.plugin.forte.merchantId - - :org.killbill.billing.plugin.forte.password - - :org.killbill.billing.plugin.forte.host - - :org.killbill.billing.plugin.forte.port - - :org.killbill.billing.plugin.forte.apiLoginId - - :org.killbill.billing.plugin.forte.secureTransactionKey :kpm: :type: :ruby :versions: - :0.15: 0.0.2 - :0.16: 0.0.5 - :0.17: 1.0.0 :0.18: 1.1.2 - :0.19: 1.2.4 :0.20: 1.3.0 -:litle: - :type: :ruby - :versions: - :0.14: 2.0.0 - :0.16: 3.0.0 - :0.18: 4.0.0 - :0.20: 5.0.0 - :require: - - :account_id - - :merchant_id - - :username - - :password - - :secure_page_url - - :paypage_id -:logging: - :type: :ruby - :versions: - :0.14: 1.7.0 - :0.15: 2.0.0 - :0.16: 3.0.0 - :0.18: 4.0.0 :orbital: :type: :ruby :versions: - :0.16: 0.0.2 - :0.18: 0.1.10 - :require: - - :login - - :password - - :merchant_id + :0.18: 0.1.15 :payment_bridge: :type: :java :artifact_id: bridge-plugin :versions: - :0.19: 0.0.12 - :0.20: 0.1.0 -:payeezy: - :type: :java - :versions: - :0.17: 0.1.0 - :require: - - :org.killbill.billing.plugin.payeezy.apiKey - - :org.killbill.billing.plugin.payeezy.token - - :org.killbill.billing.plugin.payeezy.secret - - :org.killbill.billing.plugin.payeezy.paymentUrl + :0.20: 0.1.2 + :0.21: 0.2.1 :payment-retries: :type: :java :versions: - :0.16: 0.0.1 - :0.17: 0.1.0 :0.18: 0.2.4 :paypal: :type: :ruby :artifact_id: paypal-express-plugin :versions: - :0.14: 2.0.0 - 
:0.15: 3.0.0 - :0.16: 4.1.7 - :0.18: 5.0.9 + :0.18: 5.0.15 :0.20: 6.0.0 - :require: - - :signature - - :login - - :password -:payu_latam: - :type: :ruby - :artifact_id: payu-latam-plugin - :versions: - :0.14: 0.1.0 - :0.16: 0.2.0 - :0.18: 0.3.0 - :require: - - :api_login - - :api_key - - :country_account_id - - :merchant_id :payment-test: - :type: :ruby + :type: :java :artifact_id: payment-test-plugin :versions: - :0.18: 4.2.0 - :0.19: 5.0.0 - :0.20: 6.0.0 + :0.22: 7.0.4 :securenet: :type: :ruby :versions: - :0.16: 0.1.0 :0.18: 0.2.0 - :require: - - :login - - :password :stripe: - :type: :ruby - :versions: - :0.14: 1.0.0 - :0.15: 2.0.0 - :0.16: 3.0.3 - :0.17: 4.0.0 - :0.18: 4.1.1 - :0.19: 5.0.0 - :0.20: 6.0.0 - :require: - - :api_secret_key -:zendesk: - :type: :ruby + :type: :java :versions: - :0.14: 1.3.0 - :0.16: 2.0.0 - :0.18: 3.0.1 - :require: - - :subdomain - - :username - - :password + :0.22: 7.0.4 diff --git a/kpm/lib/kpm/plugins_manager.rb b/kpm/lib/kpm/plugins_manager.rb index 536efd1b..71b639a1 100644 --- a/kpm/lib/kpm/plugins_manager.rb +++ b/kpm/lib/kpm/plugins_manager.rb @@ -1,15 +1,16 @@ +# frozen_string_literal: true + require 'pathname' require 'json' module KPM class PluginsManager - def initialize(plugins_dir, logger) @plugins_dir = Pathname.new(plugins_dir) @logger = logger end - def set_active(plugin_name_or_path, plugin_version=nil) + def set_active(plugin_name_or_path, plugin_version = nil) if plugin_name_or_path.nil? 
@logger.warn('Unable to mark a plugin as active: no name or path specified') return @@ -19,7 +20,7 @@ def set_active(plugin_name_or_path, plugin_version=nil) # Full path specified, with version link = Pathname.new(plugin_name_or_path).join('../SET_DEFAULT') FileUtils.rm_f(link) - FileUtils.ln_s(plugin_name_or_path, link, :force => true) + FileUtils.ln_s(plugin_name_or_path, link, force: true) else # Plugin name (fs directory) specified plugin_dir_glob = @plugins_dir.join('*').join(plugin_name_or_path) @@ -28,7 +29,7 @@ def set_active(plugin_name_or_path, plugin_version=nil) plugin_dir = Pathname.new(plugin_dir_path) link = plugin_dir.join('SET_DEFAULT') FileUtils.rm_f(link) - FileUtils.ln_s(plugin_dir.join(plugin_version), link, :force => true) + FileUtils.ln_s(plugin_dir.join(plugin_version), link, force: true) end end @@ -38,7 +39,7 @@ def set_active(plugin_name_or_path, plugin_version=nil) end end - def uninstall(plugin_name_or_path, plugin_version=nil) + def uninstall(plugin_name_or_path, plugin_version = nil) update_fs(plugin_name_or_path, plugin_version) do |tmp_dir| FileUtils.rm_f(tmp_dir.join('restart.txt')) # Be safe, keep the code, just never start it @@ -46,7 +47,7 @@ def uninstall(plugin_name_or_path, plugin_version=nil) end end - def restart(plugin_name_or_path, plugin_version=nil) + def restart(plugin_name_or_path, plugin_version = nil) update_fs(plugin_name_or_path, plugin_version) do |tmp_dir| # Remove disabled.txt so that the plugin is started if it was stopped FileUtils.rm_f(tmp_dir.join('disabled.txt')) @@ -59,20 +60,19 @@ def validate_plugin_identifier_key(plugin_key, coordinate_map) entry = identifiers[plugin_key] if entry coordinate_map.each_pair do |key, value| - return false if !validate_plugin_identifier_key_value(plugin_key, key, entry[key.to_s], value) + return false unless validate_plugin_identifier_key_value(plugin_key, key, entry[key.to_s], value) end end true end def add_plugin_identifier_key(plugin_key, plugin_name, language, 
coordinate_map) - identifiers = read_plugin_identifiers # If key does not already exists or if the version in the json is not the one we are currently installing we update the entry, if not nothing to do - if !identifiers.has_key?(plugin_key) || + if !identifiers.key?(plugin_key) || (coordinate_map && identifiers[plugin_key]['version'] != coordinate_map[:version]) - entry = {'plugin_name' => plugin_name} + entry = { 'plugin_name' => plugin_name } entry['language'] = language if coordinate_map entry['group_id'] = coordinate_map[:group_id] @@ -91,7 +91,7 @@ def add_plugin_identifier_key(plugin_key, plugin_name, language, coordinate_map) def remove_plugin_identifier_key(plugin_key) identifiers = read_plugin_identifiers # If key does not already exists we update it, if not nothing to do. - if identifiers.has_key?(plugin_key) + if identifiers.key?(plugin_key) identifiers.delete(plugin_key) write_plugin_identifiers(identifiers) end @@ -101,7 +101,7 @@ def remove_plugin_identifier_key(plugin_key) def get_plugin_key_and_name(plugin_name_or_key) identifiers = read_plugin_identifiers - if identifiers.has_key?(plugin_name_or_key) + if identifiers.key?(plugin_name_or_key) # It's a plugin key [plugin_name_or_key, identifiers[plugin_name_or_key]['plugin_name']] else @@ -121,19 +121,20 @@ def get_identifier_key_and_entry(plugin_name_or_key) def guess_plugin_name(artifact_id) return nil if artifact_id.nil? + captures = artifact_id.scan(/(.*)-plugin/) - if captures.empty? || captures.first.nil? || captures.first.first.nil? - short_name = artifact_id - else - # 'analytics-plugin' or 'stripe-plugin' passed - short_name = captures.first.first - end + short_name = if captures.empty? || captures.first.nil? || captures.first.first.nil? 
+ artifact_id + else + # 'analytics-plugin' or 'stripe-plugin' passed + captures.first.first + end Dir.glob(@plugins_dir.join('*').join('*')).each do |plugin_path| plugin_name = File.basename(plugin_path) if plugin_name == short_name || - plugin_name == artifact_id || - !plugin_name.scan(/-#{short_name}/).empty? || - !plugin_name.scan(/#{short_name}-/).empty? + plugin_name == artifact_id || + !plugin_name.scan(/-#{short_name}/).empty? || + !plugin_name.scan(/#{short_name}-/).empty? return plugin_name end end @@ -148,6 +149,7 @@ def read_plugin_identifiers JSON.parse(f.read) end rescue Errno::ENOENT + # Ignore end identifiers end @@ -166,7 +168,6 @@ def validate_plugin_identifier_key_value(plugin_key, value_type, entry_value, co end def write_plugin_identifiers(identifiers) - path = Pathname.new(@plugins_dir).join('plugin_identifiers.json') Dir.mktmpdir do |tmp_dir| tmp_path = Pathname.new(tmp_dir).join('plugin_identifiers.json') @@ -179,13 +180,17 @@ def write_plugin_identifiers(identifiers) end # Note: the plugin name here is the directory name on the filesystem - def update_fs(plugin_name_or_path, plugin_version=nil, &block) + def update_fs(plugin_name_or_path, plugin_version = nil) if plugin_name_or_path.nil? @logger.warn('Unable to update the filesystem: no name or path specified') return end - p = plugin_version.nil? ? plugin_name_or_path : @plugins_dir.join('*').join(plugin_name_or_path).join(plugin_version == :all ? '*' : plugin_version) + p = if plugin_version.nil? + plugin_name_or_path + else + @plugins_dir.join('*').join(plugin_name_or_path).join(plugin_version == :all ? 
'*' : plugin_version) + end modified = [] Dir.glob(p).each do |plugin_dir_path| diff --git a/kpm/lib/kpm/sha1_checker.rb b/kpm/lib/kpm/sha1_checker.rb index aec83443..f22a1367 100644 --- a/kpm/lib/kpm/sha1_checker.rb +++ b/kpm/lib/kpm/sha1_checker.rb @@ -1,20 +1,21 @@ +# frozen_string_literal: true + require 'logger' require 'yaml' require 'pathname' module KPM class Sha1Checker - - def self.from_file(sha1_file, logger=nil) + def self.from_file(sha1_file, logger = nil) Sha1Checker.new(sha1_file, logger) end - def initialize(sha1_file, logger=nil) + def initialize(sha1_file, logger = nil) @sha1_file = sha1_file init! if logger.nil? - @logger = Logger.new(STDOUT) + @logger = Logger.new(STDOUT) @logger.level = Logger::INFO else @logger = logger @@ -22,25 +23,68 @@ def initialize(sha1_file, logger=nil) end def sha1(coordinates) - @sha1_config['sha1'][coordinates] + sha1_cache[coordinates] end - def all_sha1() - @sha1_config['sha1'] + def all_sha1 + sha1_cache end def add_or_modify_entry!(coordinates, remote_sha1) - @sha1_config['sha1'][coordinates] = remote_sha1 + sha1_cache[coordinates] = remote_sha1 save! end def remove_entry!(coordinates) - @sha1_config['sha1'].delete(coordinates) + sha1_cache.delete(coordinates) + nexus_cache.delete(coordinates) + save! + end + + def artifact_info(coordinates) + nexus_cache[coordinates] + end + + def cache_artifact_info(coordinates_with_maybe_latest, artifact_info) + return if artifact_info.nil? + + if coordinates_with_maybe_latest.end_with?('LATEST') + return nil if artifact_info[:version].nil? + + coordinates = coordinates_with_maybe_latest.gsub(/LATEST$/, artifact_info[:version]) + else + coordinates = coordinates_with_maybe_latest + end + + # See BaseArtifact#artifact_info + nexus_keys = %i[sha1 version repository_path is_tgz] + nexus_cache[coordinates] = artifact_info.select { |key, _| nexus_keys.include? key } + save! 
+ end + + def killbill_info(version) + killbill_cache[version] + end + + def cache_killbill_info(version, dependencies) + killbill_cache[version] = dependencies save! end private + def sha1_cache + @sha1_config['sha1'] ||= {} + end + + def nexus_cache + @sha1_config['nexus'] ||= {} + end + + def killbill_cache + @sha1_config['killbill'] ||= {} + end + def save! Dir.mktmpdir do |tmp_destination_dir| tmp_file = File.join(tmp_destination_dir, File.basename(@sha1_file)) @@ -53,7 +97,7 @@ def save! end def init! - if !File.exists?(@sha1_file) + unless File.exist?(@sha1_file) create_sha1_directory_if_missing init_config = {} init_config['sha1'] = {} @@ -66,14 +110,11 @@ def init! def create_sha1_directory_if_missing sha1_dir = Pathname(@sha1_file).dirname - if ! File.directory?(sha1_dir) - FileUtils.mkdir_p(sha1_dir) - end + FileUtils.mkdir_p(sha1_dir) unless File.directory?(sha1_dir) end def reload! - @sha1_config = YAML::load_file(@sha1_file) + @sha1_config = YAML.load_file(@sha1_file) end - end end diff --git a/kpm/lib/kpm/system.rb b/kpm/lib/kpm/system.rb index cabb35f5..b84068c0 100644 --- a/kpm/lib/kpm/system.rb +++ b/kpm/lib/kpm/system.rb @@ -1,21 +1,23 @@ +# frozen_string_literal: true + require 'yaml' require_relative 'system_helpers/system_proxy' module KPM class System - MAX_VALUE_COLUMN_WIDTH = 60 DEFAULT_BUNDLE_DIR = Dir['/var' + File::SEPARATOR + 'lib' + File::SEPARATOR + 'killbill' + File::SEPARATOR + 'bundles'][0] || Dir['/var' + File::SEPARATOR + 'tmp' + File::SEPARATOR + 'bundles'][0] DEFAULT_KAUI_SEARCH_BASE_DIR = '**' + File::SEPARATOR + 'kaui' DEFAULT_KILLBILL_SEARCH_BASE_DIR = '**' + File::SEPARATOR + 'ROOT' - def initialize + def initialize(logger) + @logger = logger @formatter = KPM::Formatter.new end def information(bundles_dir = nil, output_as_json = false, config_file = nil, kaui_web_path = nil, killbill_web_path = nil) - set_config(config_file) - killbill_information = show_killbill_information(kaui_web_path,killbill_web_path,output_as_json) + 
self.config = config_file + killbill_information = show_killbill_information(kaui_web_path, killbill_web_path, output_as_json) java_version = `java -version 2>&1`.split("\n")[0].split('"')[1] @@ -26,14 +28,14 @@ def information(bundles_dir = nil, output_as_json = false, config_file = nil, ka disk_space_information = show_disk_space_information(output_as_json) entropy_available = show_entropy_available(output_as_json) - if not java_version.nil? - command = get_command - java_system_information = show_java_system_information(command,output_as_json) + unless java_version.nil? + command = java_command + java_system_information = show_java_system_information(command, output_as_json) end - plugin_information = show_plugin_information(get_plugin_path || bundles_dir || DEFAULT_BUNDLE_DIR, output_as_json) + plugin_information = show_plugin_information(plugin_path || bundles_dir || DEFAULT_BUNDLE_DIR, output_as_json) - json_data = Hash.new + json_data = {} json_data[:killbill_information] = killbill_information json_data[:environment_information] = environment_information json_data[:os_information] = os_information @@ -48,133 +50,117 @@ def information(bundles_dir = nil, output_as_json = false, config_file = nil, ka end def show_killbill_information(kaui_web_path, killbill_web_path, output_as_json) - kpm_version = KPM::VERSION - kaui_version = get_kaui_version(get_kaui_web_path || kaui_web_path) - killbill_version = get_killbill_version(get_killbill_web_path || killbill_web_path) - kaui_standalone_version = get_kaui_standalone_version(get_kaui_web_path || kaui_web_path) + kaui_version = kaui_version(kaui_web_path || kaui_web_path) + killbill_version = killbill_version(killbill_web_path || killbill_web_path) + kaui_standalone_version = kaui_standalone_version(kaui_web_path || kaui_web_path) - environment = Hash[:kpm => {:system=>'KPM',:version => kpm_version}, - :kaui => {:system=>'Kaui',:version => kaui_version.nil? ? 
'not found' : kaui_version}, - :kaui_standalone => {:system=>'Kaui standalone',:version => kaui_standalone_version.nil? ? 'not found' : kaui_standalone_version}, - :killbill => {:system=>'Killbill',:version => killbill_version.nil? ? 'not found' : killbill_version}] + environment = Hash[kpm: { system: 'KPM', version: kpm_version }, + kaui: { system: 'Kaui', version: kaui_version.nil? ? 'not found' : kaui_version }, + kaui_standalone: { system: 'Kaui standalone', version: kaui_standalone_version.nil? ? 'not found' : kaui_standalone_version }, + killbill: { system: 'Killbill', version: killbill_version.nil? ? 'not found' : killbill_version }] - labels = [{:label => :system}, - {:label => :version}] + labels = [{ label: :system }, + { label: :version }] - if not output_as_json - @formatter.format(environment,labels) - end + @formatter.format(environment, labels) unless output_as_json environment end def show_environment_information(java_version, output_as_json) + environment = Hash[ruby: { environment: 'Ruby', version: RUBY_VERSION }, + java: { environment: 'Java', version: java_version.nil? ? 'no version found' : java_version }] - environment = Hash[:ruby => {:environment=>'Ruby',:version => RUBY_VERSION}, - :java => {:environment=>'Java',:version => java_version.nil? ? 
'no version found' : java_version}] - - labels = [{:label => :environment}, - {:label => :version}] + labels = [{ label: :environment }, + { label: :version }] - if not output_as_json - @formatter.format(environment,labels) - end + @formatter.format(environment, labels) unless output_as_json environment end def show_cpu_information(output_as_json) - cpu_info = KPM::SystemProxy::CpuInformation.fetch - labels = KPM::SystemProxy::CpuInformation.get_labels + cpu_information = KPM::SystemProxy::CpuInformation.new + cpu_info = cpu_information.cpu_info + labels = cpu_information.labels - unless output_as_json - @formatter.format(cpu_info,labels) - end + @formatter.format(cpu_info, labels) unless output_as_json cpu_info end def show_memory_information(output_as_json) - memory_info = KPM::SystemProxy::MemoryInformation.fetch - labels = KPM::SystemProxy::MemoryInformation.get_labels + memory_information = KPM::SystemProxy::MemoryInformation.new + memory_info = memory_information.memory_info + labels = memory_information.labels - unless output_as_json - @formatter.format(memory_info,labels) - end + @formatter.format(memory_info, labels) unless output_as_json memory_info end def show_disk_space_information(output_as_json) - disk_space_info = KPM::SystemProxy::DiskSpaceInformation.fetch - labels = KPM::SystemProxy::DiskSpaceInformation.get_labels + disk_space_information = KPM::SystemProxy::DiskSpaceInformation.new + disk_space_info = disk_space_information.disk_space_info + labels = disk_space_information.labels - unless output_as_json - @formatter.format(disk_space_info,labels) - end + @formatter.format(disk_space_info, labels) unless output_as_json disk_space_info end def show_entropy_available(output_as_json) - entropy_available = KPM::SystemProxy::EntropyAvailable.fetch - labels = KPM::SystemProxy::EntropyAvailable.get_labels + entropy_information = KPM::SystemProxy::EntropyAvailable.new + entropy_available = entropy_information.entropy_available + labels = 
entropy_information.labels - unless output_as_json - @formatter.format(entropy_available,labels) - end + @formatter.format(entropy_available, labels) unless output_as_json entropy_available end def show_os_information(output_as_json) - os_information = KPM::SystemProxy::OsInformation.fetch - labels = KPM::SystemProxy::OsInformation.get_labels + os_information = KPM::SystemProxy::OsInformation.new + os_info = os_information.os_info + labels = os_information.labels - unless output_as_json - @formatter.format(os_information,labels) - end + @formatter.format(os_info, labels) unless output_as_json - os_information + os_info end def show_java_system_information(command, output_as_json) - java_system = Hash.new - property_count = 0; + java_system = {} + property_count = 0 last_key = '' `#{command}`.split("\n").each do |prop| - - if prop.to_s.strip.empty? - break; - end + break if prop.to_s.strip.empty? if property_count > 0 props = prop.split('=') - if (not props[1].nil? && props[1].to_s.strip.size > MAX_VALUE_COLUMN_WIDTH) && output_as_json == false + if !(props[1].nil? && props[1].to_s.strip.size > MAX_VALUE_COLUMN_WIDTH) && output_as_json == false chunks = ".{1,#{MAX_VALUE_COLUMN_WIDTH}}" props[1].to_s.scan(/#{chunks}/).each_with_index do |p, index| - - java_system[property_count] = {:java_property => index.equal?(0) ? props[0] : '', :value => p} + java_system[property_count] = { java_property: index.equal?(0) ? props[0] : '', value: p } property_count += 1 - end elsif output_as_json key = (props[1].nil? ? last_key : props[0]).to_s.strip value = props[1].nil? ? props[0] : props[1] - if java_system.has_key?(key) + if java_system.key?(key) java_system[key][:value] = java_system[key][:value].to_s.concat(' ').concat(value) else - java_system[key] = {:java_property => key, :value => value} + java_system[key] = { java_property: key, value: value } end else - java_system[property_count] = {:java_property => props[1].nil? ? '' : props[0], :value => props[1].nil? ? 
props[0] : props[1]} + java_system[property_count] = { java_property: props[1].nil? ? '' : props[0], value: props[1].nil? ? props[0] : props[1] } end @@ -182,22 +168,16 @@ def show_java_system_information(command, output_as_json) end property_count += 1 - end - labels = [{:label => :java_property}, - {:label => :value}] + labels = [{ label: :java_property }, + { label: :value }] - - if not output_as_json - @formatter.format(java_system,labels) - end + @formatter.format(java_system, labels) unless output_as_json java_system - end def show_plugin_information(bundles_dir, output_as_json) - if bundles_dir.nil? all_plugins = nil else @@ -206,28 +186,29 @@ def show_plugin_information(bundles_dir, output_as_json) end unless output_as_json - if all_plugins.nil? || all_plugins.size == 0 + if all_plugins.nil? || all_plugins.empty? puts "\e[91;1mNo KB plugin information available\e[0m\n\n" else @formatter.format(all_plugins) end end - if output_as_json && (all_plugins.nil? || all_plugins.size == 0) - all_plugins = 'No KB plugin information available' - end + all_plugins = 'No KB plugin information available' if output_as_json && (all_plugins.nil? || all_plugins.empty?) all_plugins end - def get_kaui_standalone_version(kaui_web_path = nil) + private + + def kaui_standalone_version(kaui_web_path = nil) kaui_search_default_dir = kaui_web_path.nil? ? DEFAULT_KAUI_SEARCH_BASE_DIR : Dir[kaui_web_path][0] return nil if kaui_search_default_dir.nil? - kaui_search_default_dir.gsub!('.war','') + + kaui_search_default_dir.gsub!('.war', '') version = nil yaml_file = kaui_search_default_dir + File::SEPARATOR + 'WEB-INF' + File::SEPARATOR + 'version.yml' unless Dir[yaml_file][0].nil? 
- yml_data = YAML::load_file(Dir[yaml_file][0]) + yml_data = YAML.load_file(Dir[yaml_file][0]) version = yml_data['version'] end @@ -235,10 +216,11 @@ def get_kaui_standalone_version(kaui_web_path = nil) version end - def get_kaui_version(kaui_web_path = nil) + def kaui_version(kaui_web_path = nil) kaui_search_default_dir = kaui_web_path.nil? ? DEFAULT_KAUI_SEARCH_BASE_DIR : Dir[kaui_web_path][0] return nil if kaui_search_default_dir.nil? - kaui_search_default_dir.gsub!('.war','') + + kaui_search_default_dir.gsub!('.war', '') version = nil gemfile = Dir[kaui_search_default_dir + File::SEPARATOR + 'WEB-INF' + File::SEPARATOR + 'Gemfile'] @@ -246,13 +228,13 @@ def get_kaui_version(kaui_web_path = nil) unless gemfile[0].nil? absolute_gemfile_path = File.absolute_path(gemfile[0]) - version = open(absolute_gemfile_path) do |f| + version = File.open(absolute_gemfile_path, 'r') do |f| f.each_line.detect do |line| - if /kaui/.match(line) - version = /(\d+)\.(\d+)\.(\d+)/.match(line) + next unless /kaui/.match(line) + + version = /(\d+)\.(\d+)\.(\d+)/.match(line) - break unless version.nil? - end + break unless version.nil? end version end @@ -262,18 +244,19 @@ def get_kaui_version(kaui_web_path = nil) version end - def get_killbill_version(killbill_web_path = nil) + def killbill_version(killbill_web_path = nil) killbill_search_default_dir = killbill_web_path.nil? ? DEFAULT_KILLBILL_SEARCH_BASE_DIR : Dir[killbill_web_path][0] return nil if killbill_search_default_dir.nil? - killbill_search_default_dir.gsub!('.war','') - killbill_search_default_dir.gsub!('webapps','**') - file = Dir[killbill_search_default_dir + File::SEPARATOR + 'META-INF' + File::SEPARATOR + '**' + File::SEPARATOR + 'pom.properties'] + killbill_search_default_dir.gsub!('.war', '') + killbill_search_default_dir.gsub!('webapps', '**') + + file = Dir[killbill_search_default_dir + File::SEPARATOR + 'META-INF' + File::SEPARATOR + '**' + File::SEPARATOR + 'pom.properties'] version = nil unless file[0].nil? 
absolute_file_path = File.absolute_path(file[0]) - version = open(absolute_file_path) do |f| + version = File.open(absolute_file_path, 'r') do |f| f.each_line.detect do |line| version = /(\d+)\.(\d+)\.(\d+)/.match(line) @@ -287,93 +270,79 @@ def get_killbill_version(killbill_web_path = nil) version end - def get_command + def java_command command = 'java -XshowSettings:properties -version 2>&1' - apache_tomcat_pid = get_apache_tomcat_pid + apache_tomcat_pid = find_apache_tomcat_pid + @logger.debug("Found Tomcat PID: #{apache_tomcat_pid}") - if not apache_tomcat_pid.nil? - command = "jcmd #{apache_tomcat_pid} VM.system_properties" - end + command = "jcmd #{apache_tomcat_pid} VM.system_properties" unless apache_tomcat_pid.nil? command end - def get_apache_tomcat_pid - apache_tomcat_pid = nil; + def find_apache_tomcat_pid + apache_tomcat_pid = nil `jcmd -l 2>&1`.split("\n").each do |line| - if /org.apache.catalina/.match(line) words = line.split(' ') apache_tomcat_pid = words[0] end - end return apache_tomcat_pid unless apache_tomcat_pid.nil? - jcmd = ( ENV['JAVA_HOME'] || '/**' ) + File::Separator + 'bin' + File::Separator + 'jcmd' + jcmd = (ENV['JAVA_HOME'] || '/**') + File::Separator + 'bin' + File::Separator + 'jcmd' jcmd = Dir[jcmd][0] return nil if jcmd.nil? - apache_tomcat_pid = `#{jcmd} | awk '/org.apache.catalina/' | cut -d ' ' -f 1`.gsub("\n",'') + apache_tomcat_pid = `#{jcmd} | awk '/org.apache.catalina/' | cut -d ' ' -f 1`.gsub("\n", '') return nil if apache_tomcat_pid.nil? || apache_tomcat_pid.empty? apache_tomcat_pid end - def set_config(config_file = nil) + def config=(config_file = nil) @config = nil - if not config_file.nil? - if not Dir[config_file][0].nil? - @config = YAML::load_file(config_file) - end - end + return if config_file.nil? + @config = YAML.load_file(config_file) unless Dir[config_file][0].nil? end - def get_kaui_web_path - kaui_web_path = nil; + def kaui_web_path + kaui_web_path = nil - if not @config.nil? + unless @config.nil? 
config_kaui = @config['kaui'] - if not config_kaui.nil? - kaui_web_path = Dir[config_kaui['webapp_path']][0] - end + kaui_web_path = Dir[config_kaui['webapp_path']][0] unless config_kaui.nil? end kaui_web_path end - def get_killbill_web_path - killbill_web_path = nil; + def killbill_web_path + killbill_web_path = nil - if not @config.nil? + unless @config.nil? config_killbill = @config['killbill'] - if not config_killbill.nil? - killbill_web_path = Dir[config_killbill['webapp_path']][0] - end + killbill_web_path = Dir[config_killbill['webapp_path']][0] unless config_killbill.nil? end killbill_web_path end - def get_plugin_path - plugin_path = nil; + def plugin_path + plugin_path = nil - if not @config.nil? + unless @config.nil? config_killbill = @config['killbill'] - if not config_killbill.nil? - plugin_path = Dir[config_killbill['plugins_dir']][0] - end + plugin_path = Dir[config_killbill['plugins_dir']][0] unless config_killbill.nil? end plugin_path end - end - -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/system_helpers/cpu_information.rb b/kpm/lib/kpm/system_helpers/cpu_information.rb index 2fefc378..236092f3 100644 --- a/kpm/lib/kpm/system_helpers/cpu_information.rb +++ b/kpm/lib/kpm/system_helpers/cpu_information.rb @@ -1,72 +1,73 @@ +# frozen_string_literal: true + module KPM module SystemProxy - module CpuInformation - class << self - def fetch - cpu_info = nil - if OS.windows? - cpu_info = fetch_windows - elsif OS.linux? - cpu_info = fetch_linux - elsif OS.mac? - cpu_info = fetch_mac - end + class CpuInformation + attr_reader :cpu_info, :labels - cpu_info - end + def initialize + @cpu_info = fetch + @labels = [{ label: :cpu_detail }, + { label: :value }] + end + + private - def get_labels - labels = [{:label => :cpu_detail}, - {:label => :value}] - labels + def fetch + cpu_info = nil + if OS.windows? + cpu_info = fetch_windows + elsif OS.linux? + cpu_info = fetch_linux + elsif OS.mac? 
+ cpu_info = fetch_mac end - private - def fetch_linux - cpu_data = `cat /proc/cpuinfo 2>&1`.gsub("\t",'') - cpu = get_hash(cpu_data) - cpu - end + cpu_info + end - def fetch_mac - cpu_data = `system_profiler SPHardwareDataType | grep -e "Processor" -e "Cores" -e "Cache" 2>&1` - cpu = get_hash(cpu_data) - cpu - end + def fetch_linux + cpu_data = `cat /proc/cpuinfo 2>&1`.gsub("\t", '') + build_hash(cpu_data) + end - def fetch_windows - cpu_name = `wmic cpu get Name`.split("\n\n") - cpu_caption = `wmic cpu get Caption`.split("\n\n") - cpu_max_clock_speed = `wmic cpu get MaxClockSpeed`.split("\n\n") - cpu_device_id = `wmic cpu get DeviceId`.split("\n\n") - cpu_status = `wmic cpu get Status`.split("\n\n") + def fetch_mac + cpu_data = `system_profiler SPHardwareDataType | grep -e "Processor" -e "Cores" -e "Cache" 2>&1` + build_hash(cpu_data) + end - cpu = Hash.new - cpu[cpu_name[0].to_s.strip] = {:cpu_detail => cpu_name[0].to_s.strip, :value => cpu_name[1].to_s.strip} - cpu[cpu_caption[0].to_s.strip] = {:cpu_detail => cpu_caption[0].to_s.strip, :value => cpu_caption[1].to_s.strip} - cpu[cpu_max_clock_speed[0].to_s.strip] = {:cpu_detail => cpu_max_clock_speed[0].to_s.strip, :value => cpu_max_clock_speed[1].to_s.strip} - cpu[cpu_device_id[0].to_s.strip] = {:cpu_detail => cpu_device_id[0].to_s.strip, :value => cpu_device_id[1].to_s.strip} - cpu[cpu_status[0].to_s.strip] = {:cpu_detail => cpu_status[0].to_s.strip, :value => cpu_status[1].to_s.strip} + def fetch_windows + cpu_name = `wmic cpu get Name`.split("\n\n") + cpu_caption = `wmic cpu get Caption`.split("\n\n") + cpu_max_clock_speed = `wmic cpu get MaxClockSpeed`.split("\n\n") + cpu_device_id = `wmic cpu get DeviceId`.split("\n\n") + cpu_status = `wmic cpu get Status`.split("\n\n") - cpu - end + cpu = {} + cpu[cpu_name[0].to_s.strip] = { cpu_detail: cpu_name[0].to_s.strip, value: cpu_name[1].to_s.strip } + cpu[cpu_caption[0].to_s.strip] = { cpu_detail: cpu_caption[0].to_s.strip, value: cpu_caption[1].to_s.strip } + 
cpu[cpu_max_clock_speed[0].to_s.strip] = { cpu_detail: cpu_max_clock_speed[0].to_s.strip, value: cpu_max_clock_speed[1].to_s.strip } + cpu[cpu_device_id[0].to_s.strip] = { cpu_detail: cpu_device_id[0].to_s.strip, value: cpu_device_id[1].to_s.strip } + cpu[cpu_status[0].to_s.strip] = { cpu_detail: cpu_status[0].to_s.strip, value: cpu_status[1].to_s.strip } - def get_hash(data) - cpu = Hash.new + cpu + end - unless data.nil? - data.split("\n").each do |info| - infos = info.split(':') + def build_hash(data) + cpu = {} + return cpu if data.nil? - unless infos[0].to_s.strip.eql?('flags') - cpu[infos[0].to_s.strip] = {:cpu_detail => infos[0].to_s.strip, :value => infos[1].to_s.strip} - end - end - end + data.split("\n").each do |info| + infos = info.split(':') - cpu - end + key = infos[0].to_s.strip + next if key.empty? || key.eql?('flags') + + cpu[key] = { cpu_detail: key, value: infos[1].to_s.strip } end + + cpu + end end end -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/system_helpers/disk_space_information.rb b/kpm/lib/kpm/system_helpers/disk_space_information.rb index 0905589d..c3cf6d33 100644 --- a/kpm/lib/kpm/system_helpers/disk_space_information.rb +++ b/kpm/lib/kpm/system_helpers/disk_space_information.rb @@ -1,84 +1,81 @@ +# frozen_string_literal: true + module KPM module SystemProxy - module DiskSpaceInformation - class << self - - def fetch - disk_space_info = nil - if OS.windows? - disk_space_info = fetch_windows - elsif OS.linux? - disk_space_info = fetch_linux_mac(5) - elsif OS.mac? 
- disk_space_info = fetch_linux_mac(8) - end + class DiskSpaceInformation + attr_reader :disk_space_info, :labels + + def initialize + data_keys = [] + @disk_space_info = fetch(data_keys) + @labels = [] + data_keys.each { |key| @labels.push(label: key.gsub(' ', '_').to_sym) } + end - disk_space_info - end + private - def get_labels - labels = [] - @@data_keys.each { |key| labels.push({:label => key.gsub(' ','_').to_sym})} - labels + def fetch(data_keys) + disk_space_info = nil + if OS.windows? + disk_space_info = fetch_windows(data_keys) + elsif OS.linux? + disk_space_info = fetch_linux_mac(5, data_keys) + elsif OS.mac? + disk_space_info = fetch_linux_mac(8, data_keys) end - private - def fetch_linux_mac(cols_count) - disk_space_info = `df 2>&1` - disk_space = get_hash(disk_space_info,cols_count,true) - disk_space - end + disk_space_info + end - def fetch_windows - disk_space_info = `wmic logicaldisk get size,freespace,caption 2>&1` - disk_space = get_hash(disk_space_info,3,false) - disk_space - end + def fetch_linux_mac(cols_count, data_keys) + disk_space_info = `df 2>&1` + build_hash(disk_space_info, cols_count, true, data_keys) + end - def get_hash(data, cols_count, merge_last_two_columns) - disk_space = Hash.new + def fetch_windows(data_keys) + disk_space_info = `wmic logicaldisk get size,freespace,caption 2>&1` + build_hash(disk_space_info, 3, false, data_keys) + end - unless data.nil? + def build_hash(data, cols_count, merge_last_two_columns, data_keys) + disk_space = {} - data_table = data.split("\n") + unless data.nil? 
- @@data_keys = data_table[0].split(' ') + data_table = data.split("\n") - if merge_last_two_columns - @@data_keys[@@data_keys.length - 2] = @@data_keys[@@data_keys.length - 2] + ' ' + @@data_keys[@@data_keys.length - 1] - @@data_keys.delete_at(@@data_keys.length - 1) - end + data_keys.concat(data_table[0].split(' ')) - row_num = 0 - data_table.each do |row| - cols = row.split(' ') - row_num += 1 - unless cols[0].to_s.eql?(@@data_keys[0]) - key = 'DiskInfo_'+row_num.to_s - disk_space[key] = Hash.new - cols.each_index do |idx| - if idx > cols_count - break - end - - value = cols[idx].to_s.strip - if idx == cols_count && cols.length - 1 > idx - for i in cols_count+1..cols.length - value += ' ' + cols[i].to_s.strip - end - end - - disk_space[key][@@data_keys[idx].gsub(' ','_').to_sym] = value - end - end + if merge_last_two_columns + data_keys[data_keys.length - 2] = data_keys[data_keys.length - 2] + ' ' + data_keys[data_keys.length - 1] + data_keys.delete_at(data_keys.length - 1) + end + row_num = 0 + data_table.each do |row| + cols = row.split(' ') + row_num += 1 + next if cols[0].to_s.eql?(data_keys[0]) + + key = 'DiskInfo_' + row_num.to_s + disk_space[key] = {} + cols.each_index do |idx| + break if idx > cols_count + + value = cols[idx].to_s.strip + if idx == cols_count && cols.length - 1 > idx + (cols_count + 1..cols.length).each do |i| + value += ' ' + cols[i].to_s.strip + end end - end - disk_space + disk_space[key][data_keys[idx].gsub(' ', '_').to_sym] = value + end end + end + disk_space end end end -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/system_helpers/entropy_available.rb b/kpm/lib/kpm/system_helpers/entropy_available.rb index 7747cdff..a1fd753c 100644 --- a/kpm/lib/kpm/system_helpers/entropy_available.rb +++ b/kpm/lib/kpm/system_helpers/entropy_available.rb @@ -1,52 +1,50 @@ +# frozen_string_literal: true + module KPM module SystemProxy - module EntropyAvailable - class << self - - def fetch - entropy_available = nil - if OS.windows? 
- entropy_available = fetch_windows - elsif OS.linux? - entropy_available = fetch_linux - elsif OS.mac? - entropy_available = fetch_mac - end - - entropy_available - end + class EntropyAvailable + attr_reader :entropy_available, :labels + + def initialize + @entropy_available = fetch + @labels = [{ label: :entropy }, + { label: :value }] + end + + private - def get_labels - labels = [{:label => :entropy}, - {:label => :value}] - labels + def fetch + entropy_available = nil + if OS.windows? + entropy_available = fetch_windows + elsif OS.linux? + entropy_available = fetch_linux + elsif OS.mac? + entropy_available = fetch_mac end - private - def fetch_linux - entropy_available_data = `cat /proc/sys/kernel/random/entropy_avail 2>&1`.gsub("\n",'') - entropy_available = get_hash(entropy_available_data) - entropy_available - end + entropy_available + end - def fetch_mac - entropy_available = get_hash('-') - entropy_available - end + def fetch_linux + entropy_available_data = `cat /proc/sys/kernel/random/entropy_avail 2>&1`.gsub("\n", '') + build_hash(entropy_available_data) + end - def fetch_windows - entropy_available = get_hash('-') - entropy_available - end + def fetch_mac + build_hash('-') + end - def get_hash(data) - entropy_available = Hash.new - entropy_available['entropy_available'] = {:entropy => 'available', :value => data} + def fetch_windows + build_hash('-') + end - entropy_available - end + def build_hash(data) + entropy_available = {} + entropy_available['entropy_available'] = { entropy: 'available', value: data } + entropy_available end end end -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/system_helpers/memory_information.rb b/kpm/lib/kpm/system_helpers/memory_information.rb index 516dcd71..4f0cf24b 100644 --- a/kpm/lib/kpm/system_helpers/memory_information.rb +++ b/kpm/lib/kpm/system_helpers/memory_information.rb @@ -1,71 +1,72 @@ +# frozen_string_literal: true + module KPM module SystemProxy - module MemoryInformation - class << self + 
class MemoryInformation + attr_reader :memory_info, :labels - def fetch - memory_info = nil - if OS.windows? - memory_info = fetch_windows - elsif OS.linux? - memory_info = fetch_linux - elsif OS.mac? - memory_info = fetch_mac - end + def initialize + @memory_info = fetch + @labels = [{ label: :memory_detail }, + { label: :value }] + end - memory_info - end + private - def get_labels - labels = [{:label => :memory_detail}, - {:label => :value}] - labels + def fetch + memory_info = nil + if OS.windows? + memory_info = fetch_windows + elsif OS.linux? + memory_info = fetch_linux + elsif OS.mac? + memory_info = fetch_mac end - private - def fetch_linux - mem_data = `cat /proc/meminfo 2>&1`.gsub("\t",'') - mem = get_hash(mem_data) - mem - end + memory_info + end - def fetch_mac - mem_data = `vm_stat 2>&1`.gsub(".",'') - mem = get_hash(mem_data) + def fetch_linux + mem_data = `cat /proc/meminfo 2>&1`.gsub("\t", '') + build_hash(mem_data) + end - mem.each_key do |key| - mem[key][:value] = ((mem[key][:value].to_i * 4096) / 1024 / 1024).to_s + 'MB' - mem[key][:memory_detail] = mem[key][:memory_detail].gsub('Pages','Memory') - end + def fetch_mac + mem_data = `vm_stat 2>&1`.gsub('.', '') + mem_total_data = `system_profiler SPHardwareDataType | grep " Memory:" 2>&1` + build_hash_mac(mem_data, mem_total_data) + end - mem_total_data = `system_profiler SPHardwareDataType | grep " Memory:" 2>&1` - mem_total = get_hash(mem_total_data) + def build_hash_mac(mem_data, mem_total_data) + mem = build_hash(mem_data) - mem = mem_total.merge(mem) + mem.each_key do |key| + mem[key][:value] = ((mem[key][:value].to_i * 4096) / 1024 / 1024).to_s + 'MB' + mem[key][:memory_detail] = mem[key][:memory_detail].gsub('Pages', 'Memory') + end - mem - end + mem_total = build_hash(mem_total_data) - def fetch_windows - mem_data = `systeminfo | findstr /C:"Total Physical Memory" /C:"Available Physical Memory"` - mem = get_hash(mem_data) - mem - end + mem_total.merge(mem) + end - def get_hash(data) - mem = 
Hash.new + def fetch_windows + mem_data = `systeminfo | findstr /C:"Total Physical Memory" /C:"Available Physical Memory"` + build_hash(mem_data) + end - unless data.nil? - data.split("\n").each do |info| - infos = info.split(':') - mem[infos[0].to_s.strip] = {:memory_detail => infos[0].to_s.strip, :value => infos[1].to_s.strip} - end - end + def build_hash(data) + mem = {} + return mem if data.nil? - mem - end + data.split("\n").each do |info| + infos = info.split(':') + key = infos[0].to_s.strip.gsub('"', '') + mem[key] = { memory_detail: key, value: infos[1].to_s.strip } + end + mem end end end -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/system_helpers/os_information.rb b/kpm/lib/kpm/system_helpers/os_information.rb index 399e8b3a..1f67e887 100644 --- a/kpm/lib/kpm/system_helpers/os_information.rb +++ b/kpm/lib/kpm/system_helpers/os_information.rb @@ -1,66 +1,64 @@ +# frozen_string_literal: true + module KPM module SystemProxy - module OsInformation - class << self + class OsInformation + attr_reader :os_info, :labels - def fetch - os_information = nil - if OS.windows? - os_information = fetch_windows - elsif OS.linux? - os_information = fetch_linux - elsif OS.mac? - os_information = fetch_mac - end + def initialize + @os_info = fetch + @labels = [{ label: :os_detail }, + { label: :value }] + end - os_information - end + private - def get_labels - labels = [{:label => :os_detail}, - {:label => :value}] - labels + def fetch + os_information = nil + if OS.windows? + os_information = fetch_windows + elsif OS.linux? + os_information = fetch_linux + elsif OS.mac? + os_information = fetch_mac end - private - def fetch_linux - os_data = `lsb_release -a 2>&1` + os_information + end - if os_data.nil? || os_data.include?('lsb_release: not found') - os_data = `cat /etc/issue 2>&1` - os_data = 'Description:'+os_data.gsub('\n \l','') - end + def fetch_linux + os_data = `lsb_release -a 2>&1` - os = get_hash(os_data) - os - end + if os_data.nil? 
|| os_data.include?('lsb_release: not found') + os_data = `cat /etc/issue 2>&1` + os_data = 'Description:' + os_data.gsub('\n \l', '') + end - def fetch_mac - os_data = `sw_vers` - os = get_hash(os_data) - os - end + build_hash(os_data) + end - def fetch_windows - os_data = `systeminfo | findstr /C:"OS"` - os = get_hash(os_data) - os - end + def fetch_mac + os_data = `sw_vers` + build_hash(os_data) + end - def get_hash(data) - os = Hash.new + def fetch_windows + os_data = `systeminfo | findstr /C:"OS"` + build_hash(os_data) + end - unless data.nil? - data.split("\n").each do |info| - infos = info.split(':') - os[infos[0].to_s.strip] = {:os_detail => infos[0].to_s.strip, :value => infos[1].to_s.strip} - end - end + def build_hash(data) + os = {} - os + unless data.nil? + data.split("\n").each do |info| + infos = info.split(':') + os[infos[0].to_s.strip] = { os_detail: infos[0].to_s.strip, value: infos[1].to_s.strip } end + end + os end end end -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/system_helpers/system_proxy.rb b/kpm/lib/kpm/system_helpers/system_proxy.rb index 3e6d4ab7..41fab2ec 100644 --- a/kpm/lib/kpm/system_helpers/system_proxy.rb +++ b/kpm/lib/kpm/system_helpers/system_proxy.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require_relative 'cpu_information' require_relative 'memory_information' require_relative 'disk_space_information' @@ -5,24 +7,22 @@ require_relative 'os_information' module KPM module SystemProxy - module OS - def OS.windows? - (/cygwin|mswin|mingw|bccwin|wince|emx/ =~ RbConfig::CONFIG["host_os"]) != nil + def self.windows? + (/cygwin|mswin|mingw|bccwin|wince|emx/ =~ RbConfig::CONFIG['host_os']) != nil end - def OS.mac? - (/darwin/ =~ RbConfig::CONFIG["host_os"]) != nil + def self.mac? + (/darwin/ =~ RbConfig::CONFIG['host_os']) != nil end - def OS.unix? + def self.unix? !OS.windows? end - def OS.linux? - OS.unix? and not OS.mac? + def self.linux? + OS.unix? && !OS.mac? 
end end - end -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/tasks.rb b/kpm/lib/kpm/tasks.rb index cb96a36e..25c52eb9 100644 --- a/kpm/lib/kpm/tasks.rb +++ b/kpm/lib/kpm/tasks.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require 'highline' require 'logger' require 'thor' @@ -10,81 +12,96 @@ module Tasks def self.included(base) base.send :include, ::Thor::Actions base.class_eval do - - - desc 'KPM version', 'Return current KPM version.' + desc 'version', 'Return current KPM version.' def version say "KPM version #{KPM::VERSION}" end class_option :overrides, - :type => :hash, - :default => nil, - :desc => "A hashed list of overrides. Available options are 'url', 'repository', 'username', and 'password'." + type: :hash, + default: nil, + desc: "A hashed list of overrides. Available options are 'url', 'repository', 'username', and 'password'." class_option :ssl_verify, - :type => :boolean, - :default => true, - :desc => 'Set to false to disable SSL Verification.' + type: :boolean, + default: true, + desc: 'Set to false to disable SSL Verification.' method_option :force_download, - :type => :boolean, - :default => false, - :desc => 'Force download of the artifact even if it exists' + type: :boolean, + default: false, + desc: 'Force download of the artifact even if it exists' method_option :verify_sha1, - :type => :boolean, - :default => true, - :desc => 'Validate sha1 sum' + type: :boolean, + default: true, + desc: 'Validate sha1 sum' desc 'install config_file', 'Install Kill Bill server and plugins according to the specified YAML configuration file.' - def install(config_file=nil) + def install(config_file = nil) help = Installer.from_file(config_file).install(options[:force_download], options[:verify_sha1]) help = JSON(help) say help['help'], :green unless help['help'].nil? end method_option :destination, - :type => :string, - :default => nil, - :desc => 'A different folder other than the default bundles directory.' 
+ type: :string, + default: nil, + desc: 'A different folder other than the default bundles directory.' method_option :force, - :type => :boolean, - :default => nil, - :desc => 'Don\'t ask for confirmation while deleting multiple versions of a plugin.' + type: :boolean, + default: nil, + desc: 'Don\'t ask for confirmation while deleting multiple versions of a plugin.' + method_option :version, + type: :string, + default: nil, + desc: 'Specific plugin version to uninstall' desc 'uninstall plugin', 'Uninstall the specified plugin, identified by its name or key, from current deployment' def uninstall(plugin) - say 'Done!' if Uninstaller.new(options[:destination]).uninstall_plugin(plugin, options[:force]) + Uninstaller.new(options[:destination]).uninstall_plugin(plugin, options[:force], options[:version]) end method_option :destination, - :type => :string, - :default => nil, - :desc => 'A different folder other than the current working directory.' + type: :string, + default: nil, + desc: 'A different folder other than the default bundles directory.' + method_option :dry_run, + type: :boolean, + default: false, + desc: 'Print the plugins which would be deleted' + desc 'cleanup', 'Delete old plugins' + def cleanup + Uninstaller.new(options[:destination]).uninstall_non_default_plugins(options[:dry_run]) + end + + method_option :destination, + type: :string, + default: nil, + desc: 'A different folder other than the current working directory.' 
method_option :bundles_dir, - :type => :string, - :default => nil, - :desc => 'The location where bundles will be installed (along with sha1 file)' + type: :string, + default: nil, + desc: 'The location where bundles will be installed (along with sha1 file)' method_option :force_download, - :type => :boolean, - :default => false, - :desc => 'Force download of the artifact even if it exists' + type: :boolean, + default: false, + desc: 'Force download of the artifact even if it exists' method_option :verify_sha1, - :type => :boolean, - :default => true, - :desc => 'Validate sha1 sum' - desc 'pull_kb_server_war ', 'Pulls Kill Bill server war from Sonatype and places it on your machine. If version was not specified it uses the latest released version.' - def pull_kb_server_war(version='LATEST') + type: :boolean, + default: true, + desc: 'Validate sha1 sum' + desc 'pull_kb_server_war ', 'Pulls Kill Bill server war and places it on your machine. If version was not specified it uses the latest released version.' 
+ def pull_kb_server_war(version = 'LATEST') installer = BaseInstaller.new(logger, options[:overrides], options[:ssl_verify]) response = installer.install_killbill_server(KillbillServerArtifact::KILLBILL_GROUP_ID, - KillbillServerArtifact::KILLBILL_ARTIFACT_ID, - KillbillServerArtifact::KILLBILL_PACKAGING, - KillbillServerArtifact::KILLBILL_CLASSIFIER, - version, - options[:destination], - options[:bundles_dir], - options[:force_download], - options[:verify_sha1]) + KillbillServerArtifact::KILLBILL_ARTIFACT_ID, + KillbillServerArtifact::KILLBILL_PACKAGING, + KillbillServerArtifact::KILLBILL_CLASSIFIER, + version, + options[:destination], + options[:bundles_dir], + options[:force_download], + options[:verify_sha1]) say "Artifact has been retrieved and can be found at path: #{response[:file_path]}", :green end @@ -97,98 +114,54 @@ def search_for_kb_server options[:ssl_verify]).to_a.join(', ')}", :green end - method_option :destination, - :type => :string, - :default => nil, - :desc => 'A different folder other than the current working directory.' - method_option :bundles_dir, - :type => :string, - :default => nil, - :desc => 'The location where bundles will be installed (along with sha1 file)' - method_option :force_download, - :type => :boolean, - :default => false, - :desc => 'Force download of the artifact even if it exists' - method_option :verify_sha1, - :type => :boolean, - :default => true, - :desc => 'Validates sha1 sum' - desc 'pull_kp_server_war ', 'Pulls Kill Pay server war from Sonatype and places it on your machine. If version was not specified it uses the latest released version.' 
- def pull_kp_server_war(version='LATEST') - installer = BaseInstaller.new(logger, - options[:overrides], - options[:ssl_verify]) - response = installer.install_killbill_server(KillbillServerArtifact::KILLBILL_GROUP_ID, - KillbillServerArtifact::KILLPAY_ARTIFACT_ID, - KillbillServerArtifact::KILLPAY_PACKAGING, - KillbillServerArtifact::KILLPAY_CLASSIFIER, - version, - options[:destination], - options[:bundles_dir], - options[:force_download], - options[:verify_sha1]) - say "Artifact has been retrieved and can be found at path: #{response[:file_path]}", :green - end - - desc 'search_for_kp_server', 'Searches for all versions of Kill Pay server and prints them to the screen.' - def search_for_kp_server - say "Available versions: #{KillbillServerArtifact.versions(KillbillServerArtifact::KILLPAY_ARTIFACT_ID, - KillbillServerArtifact::KILLPAY_PACKAGING, - KillbillServerArtifact::KILLPAY_CLASSIFIER, - options[:overrides], - options[:ssl_verify]).to_a.join(', ')}", :green - end - method_option :group_id, - :type => :string, - :default => KillbillPluginArtifact::KILLBILL_JAVA_PLUGIN_GROUP_ID, - :desc => 'The plugin artifact group-id' + type: :string, + default: KillbillPluginArtifact::KILLBILL_JAVA_PLUGIN_GROUP_ID, + desc: 'The plugin artifact group-id' method_option :artifact_id, - :type => :string, - :default => nil, - :desc => 'The plugin artifact id' + type: :string, + default: nil, + desc: 'The plugin artifact id' method_option :version, - :type => :string, - :default => nil, - :desc => 'The plugin artifact version' + type: :string, + default: nil, + desc: 'The plugin artifact version' method_option :packaging, - :type => :string, - :default => KillbillPluginArtifact::KILLBILL_JAVA_PLUGIN_PACKAGING, - :desc => 'The plugin artifact packaging' + type: :string, + default: KillbillPluginArtifact::KILLBILL_JAVA_PLUGIN_PACKAGING, + desc: 'The plugin artifact packaging' method_option :classifier, - :type => :string, - :default => 
KillbillPluginArtifact::KILLBILL_JAVA_PLUGIN_CLASSIFIER, - :desc => 'The plugin artifact classifier' + type: :string, + default: KillbillPluginArtifact::KILLBILL_JAVA_PLUGIN_CLASSIFIER, + desc: 'The plugin artifact classifier' method_option :from_source_file, - :type => :string, - :default => nil, - :desc => 'Specify the plugin jar that should be used for the installation.' + type: :string, + default: nil, + desc: 'Specify the plugin jar that should be used for the installation.' method_option :destination, - :type => :string, - :default => nil, - :desc => 'A different folder other than the current working directory.' + type: :string, + default: nil, + desc: 'A different folder other than the current working directory.' method_option :force_download, - :type => :boolean, - :default => false, - :desc => 'Force download of the artifact even if it exists' + type: :boolean, + default: false, + desc: 'Force download of the artifact even if it exists' method_option :sha1_file, - :type => :string, - :default => nil, - :desc => 'Location of the sha1 file' + type: :string, + default: nil, + desc: 'Location of the sha1 file' method_option :verify_sha1, - :type => :boolean, - :default => true, - :desc => 'Validates sha1 sum' - desc 'install_java_plugin plugin-key ', 'Pulls a java plugin from Sonatype and installs it under the specified destination. If the kb-version has been specified, it is used to download the matching plugin artifact version; if not, it uses the specified plugin version or if null, the LATEST one.' - def install_java_plugin(plugin_key, kb_version='LATEST') - - + type: :boolean, + default: true, + desc: 'Validates sha1 sum' + desc 'install_java_plugin plugin-key ', 'Pulls a java plugin and installs it under the specified destination. If the kb-version has been specified, it is used to download the matching plugin artifact version; if not, it uses the specified plugin version or if null, the LATEST one.' 
+ def install_java_plugin(plugin_key, kb_version = 'LATEST') installer = BaseInstaller.new(logger, options[:overrides], options[:ssl_verify]) - if options[:from_source_file].nil? - response = installer.install_plugin(plugin_key, + response = if options[:from_source_file].nil? + installer.install_plugin(plugin_key, kb_version, options[:group_id], options[:artifact_id], @@ -200,64 +173,61 @@ def install_java_plugin(plugin_key, kb_version='LATEST') options[:force_download], options[:verify_sha1], false) - else - response = installer.install_plugin_from_fs(plugin_key, options[:from_source_file], nil, options[:version], options[:destination], 'java') - end + else + installer.install_plugin_from_fs(plugin_key, options[:from_source_file], nil, options[:version], options[:destination], 'java') + end say "Artifact has been retrieved and can be found at path: #{response[:file_path]}", :green end - - - method_option :group_id, - :type => :string, - :default => KillbillPluginArtifact::KILLBILL_RUBY_PLUGIN_GROUP_ID, - :desc => 'The plugin artifact group-id' + type: :string, + default: KillbillPluginArtifact::KILLBILL_RUBY_PLUGIN_GROUP_ID, + desc: 'The plugin artifact group-id' method_option :artifact_id, - :type => :string, - :default => nil, - :desc => 'The plugin artifact id' + type: :string, + default: nil, + desc: 'The plugin artifact id' method_option :version, - :type => :string, - :default => nil, - :desc => 'The plugin artifact version' + type: :string, + default: nil, + desc: 'The plugin artifact version' method_option :packaging, - :type => :string, - :default => KillbillPluginArtifact::KILLBILL_RUBY_PLUGIN_PACKAGING, - :desc => 'The plugin artifact packaging' + type: :string, + default: KillbillPluginArtifact::KILLBILL_RUBY_PLUGIN_PACKAGING, + desc: 'The plugin artifact packaging' method_option :classifier, - :type => :string, - :default => KillbillPluginArtifact::KILLBILL_RUBY_PLUGIN_CLASSIFIER, - :desc => 'The plugin artifact classifier' + type: :string, + default: 
KillbillPluginArtifact::KILLBILL_RUBY_PLUGIN_CLASSIFIER, + desc: 'The plugin artifact classifier' method_option :from_source_file, - :type => :string, - :default => nil, - :desc => 'Specify the ruby plugin archive that should be used for the installation.' + type: :string, + default: nil, + desc: 'Specify the ruby plugin archive that should be used for the installation.' method_option :destination, - :type => :string, - :default => nil, - :desc => 'A different folder other than the current working directory.' + type: :string, + default: nil, + desc: 'A different folder other than the current working directory.' method_option :force_download, - :type => :boolean, - :default => false, - :desc => 'Force download of the artifact even if it exists' + type: :boolean, + default: false, + desc: 'Force download of the artifact even if it exists' method_option :sha1_file, - :type => :string, - :default => nil, - :desc => 'Location of the sha1 file' + type: :string, + default: nil, + desc: 'Location of the sha1 file' method_option :verify_sha1, - :type => :boolean, - :default => true, - :desc => 'Validates sha1 sum' - desc 'install_ruby_plugin plugin-key ', 'Pulls a ruby plugin from Sonatype and installs it under the specified destination. If the kb-version has been specified, it is used to download the matching plugin artifact version; if not, it uses the specified plugin version or if null, the LATEST one.' - def install_ruby_plugin(plugin_key, kb_version='LATEST') + type: :boolean, + default: true, + desc: 'Validates sha1 sum' + desc 'install_ruby_plugin plugin-key ', 'Pulls a ruby plugin and installs it under the specified destination. If the kb-version has been specified, it is used to download the matching plugin artifact version; if not, it uses the specified plugin version or if null, the LATEST one.' 
+ def install_ruby_plugin(plugin_key, kb_version = 'LATEST') installer = BaseInstaller.new(logger, - options[:overrides], - options[:ssl_verify]) + options[:overrides], + options[:ssl_verify]) - if options[:from_source_file].nil? - response = installer.install_plugin(plugin_key, + response = if options[:from_source_file].nil? + installer.install_plugin(plugin_key, kb_version, options[:group_id], options[:artifact_id], @@ -269,28 +239,27 @@ def install_ruby_plugin(plugin_key, kb_version='LATEST') options[:force_download], options[:verify_sha1], true) - else - response = installer.install_plugin_from_fs(plugin_key, options[:from_source_file], nil, nil, options[:destination], 'ruby') - end + else + installer.install_plugin_from_fs(plugin_key, options[:from_source_file], nil, nil, options[:destination], 'ruby') + end say "Artifact has been retrieved and can be found at path: #{response[:file_path]}", :green - end method_option :destination, - :type => :string, - :default => nil, - :desc => 'A different folder other than the default bundles directory.' + type: :string, + default: nil, + desc: 'A different folder other than the default bundles directory.' method_option :force_download, - :type => :boolean, - :default => false, - :desc => 'Force download of the artifact even if it exists' + type: :boolean, + default: false, + desc: 'Force download of the artifact even if it exists' method_option :verify_sha1, - :type => :boolean, - :default => true, - :desc => 'Validates sha1 sum' - desc 'pull_defaultbundles ', 'Pulls the default OSGI bundles from Sonatype and places it on your machine. If the kb-version has been specified, it is used to download the matching platform artifact; if not, it uses the latest released version.' - def pull_defaultbundles(kb_version='LATEST') + type: :boolean, + default: true, + desc: 'Validates sha1 sum' + desc 'pull_defaultbundles ', 'Pulls the default OSGI bundles and places it on your machine. 
If the kb-version has been specified, it is used to download the matching platform artifact; if not, it uses the latest released version.' + def pull_defaultbundles(kb_version = 'LATEST') response = BaseInstaller.new(logger, options[:overrides], options[:ssl_verify]) @@ -318,23 +287,23 @@ def search_for_plugins end method_option :destination, - :type => :string, - :default => nil, - :desc => 'A different folder other than the current working directory.' + type: :string, + default: nil, + desc: 'A different folder other than the current working directory.' method_option :force_download, - :type => :boolean, - :default => false, - :desc => 'Force download of the artifact even if it exists' + type: :boolean, + default: false, + desc: 'Force download of the artifact even if it exists' method_option :sha1_file, - :type => :string, - :default => nil, - :desc => 'Location of the sha1 file' + type: :string, + default: nil, + desc: 'Location of the sha1 file' method_option :verify_sha1, - :type => :boolean, - :default => true, - :desc => 'Validates sha1 sum' - desc 'pull_kaui_war ', 'Pulls Kaui war from Sonatype and places it on your machine. If version was not specified it uses the latest released version.' - def pull_kaui_war(version='LATEST') + type: :boolean, + default: true, + desc: 'Validates sha1 sum' + desc 'pull_kaui_war ', 'Pulls Kaui war and places it on your machine. If version was not specified it uses the latest released version.' 
+ def pull_kaui_war(version = 'LATEST') response = KauiArtifact.pull(logger, KauiArtifact::KAUI_GROUP_ID, KauiArtifact::KAUI_ARTIFACT_ID, @@ -356,36 +325,54 @@ def search_for_kaui end method_option :version, - :type => :string, - :default => 'LATEST', - :desc => 'Kill Bill version' + type: :string, + default: 'LATEST', + desc: 'Kill Bill version' + method_option :force_download, + type: :boolean, + default: false, + desc: 'Ignore local cache' + method_option :sha1_file, + type: :string, + default: nil, + desc: 'Location of the sha1 file' + method_option :verify_sha1, + type: :boolean, + default: true, + desc: 'Validates sha1 sum' + method_option :as_json, + type: :boolean, + default: false, + desc: 'Set the output format as JSON when true' desc 'info', 'Describe information about a Kill Bill version' def info - - say "Fetching info for version #{options[:version]}...\n" - versions_info = KillbillServerArtifact.info(options[:version], - options[:overrides], - options[:ssl_verify]) - say "Dependencies for version #{options[:version]}\n " + (versions_info.map {|k,v| "#{k} #{v}"}).join("\n "), :green - say "\n\n" - + options[:sha1_file], + options[:force_download], + options[:verify_sha1], + options[:overrides], + options[:ssl_verify]) resolved_kb_version = versions_info['killbill'] - kb_version = resolved_kb_version.split('.').slice(0,2).join(".") + kb_version = resolved_kb_version.split('.').slice(0, 2).join('.') plugins_info = KPM::PluginsDirectory.list_plugins(true, kb_version) - say "Known plugin for KB version #{options[:version]}\n " + (plugins_info.map {|k,v| "#{k} #{v}"}).join("\n "), :green + if options[:as_json] + puts({ 'killbill' => versions_info, 'plugins' => plugins_info }.to_json) + else + say "Dependencies for version #{options[:version]}\n " + (versions_info.map { |k, v| "#{k} #{v}" }).join("\n "), :green + say "Known plugins for KB version #{options[:version]}\n " + (plugins_info.map { |k, v| "#{k} #{v}" }).join("\n "), :green + end end method_option 
:destination, - :type => :string, - :default => nil, - :desc => 'Folder where to download migration files.' + type: :string, + default: nil, + desc: 'Folder where to download migration files.' method_option :token, - :type => :string, - :default => nil, - :desc => 'GitHub OAuth token.' + type: :string, + default: nil, + desc: 'GitHub OAuth token.' desc 'migrations repository from to', 'Download migration files for Kill Bill or a plugin' def migrations(repository, from, to = nil) full_repo = repository.include?('/') ? repository : "killbill/#{repository}" @@ -394,324 +381,264 @@ def migrations(repository, from, to = nil) end method_option :destination, - :type => :string, - :default => nil, - :desc => 'A different folder other than the default bundles directory.' + type: :string, + default: nil, + desc: 'A different folder other than the default bundles directory.' + method_option :as_json, + type: :boolean, + default: false, + desc: 'Set the output format as JSON when true' desc 'inspect', 'Inspect current deployment' def inspect inspector = KPM::Inspector.new - puts options[:destination] all_plugins = inspector.inspect(options[:destination]) - #puts all_plugins.to_json - inspector.format(all_plugins) + options[:as_json] ? puts(all_plugins.to_json) : inspector.format(all_plugins) end method_option :bundles_dir, - :type => :string, - :default => nil, - :desc => 'A different folder other than the default bundles directory.' + type: :string, + default: nil, + desc: 'A different folder other than the default bundles directory.' 
method_option :config_file, - :type => :string, - :default => nil, - :desc => 'KPM configuration file (yml file)' + type: :string, + default: nil, + desc: 'KPM configuration file (yml file)' method_option :as_json, - :type => :boolean, - :default => false, - :desc => 'Set the output format as JSON when true' + type: :boolean, + default: false, + desc: 'Set the output format as JSON when true' method_option :kaui_web_path, - :type => :string, - :default => nil, - :desc => 'Path for the KAUI web app' + type: :string, + default: nil, + desc: 'Path for the KAUI web app' method_option :killbill_web_path, - :type => :string, - :default => nil, - :desc => 'Path for the killbill web app' + type: :string, + default: nil, + desc: 'Path for the killbill web app' desc 'system', 'Gather information about the system' def system - system = KPM::System.new + system = KPM::System.new(logger) information = system.information(options[:bundles_dir], options[:as_json], options[:config_file], options[:kaui_web_path], - options[:killbill_web_path]) - - if options[:as_json] - puts information - end + options[:killbill_web_path]) + puts information if options[:as_json] end method_option :export, - :type => :string, - :default => nil, - :desc => 'export account for a provided id.' + type: :string, + default: nil, + desc: 'export account for a provided id.' method_option :import, - :type => :string, - :default => nil, - :desc => 'import account for a previously exported file.' + type: :string, + default: nil, + desc: 'import account for a previously exported file.' method_option :tenant_record_id, - :type => :numeric, - :default => nil, - :desc => 'replace the tenant_record_id before importing data.' + type: :numeric, + default: nil, + desc: 'replace the tenant_record_id before importing data.' 
method_option :generate_record_id, - :type => :boolean, - :default => false, - :desc => 'The generate_record_id will instruct to generate the tables record_ids that were exported' + type: :boolean, + default: false, + desc: 'The generate_record_id will instruct to generate the tables record_ids that were exported' method_option :skip_payment_methods, - :type => :boolean, - :default => false, - :desc => 'Skip or swap payment types other than __EXTERNAL_PAYMENT__.' + type: :boolean, + default: false, + desc: 'Skip or swap payment types other than __EXTERNAL_PAYMENT__.' method_option :config_file, - :type => :string, - :default => nil, - :desc => 'Yml that contains killbill api connection and DB connection' + type: :string, + default: nil, + desc: 'Yml that contains killbill api connection and DB connection' method_option :killbill_api_credentials, - :type => :array, - :default => nil, - :desc => 'Killbill api credentials ' + type: :array, + default: nil, + desc: 'Killbill api credentials ' method_option :killbill_credentials, - :type => :array, - :default => nil, - :desc => 'Killbill credentials ' + type: :array, + default: nil, + desc: 'Killbill credentials ' method_option :killbill_url, - :type => :string, - :default => nil, - :desc => 'Killbill URL ex. http://127.0.0.1:8080' + type: :string, + default: nil, + desc: 'Killbill URL ex. 
http://127.0.0.1:8080' method_option :database_name, - :type => :string, - :default => nil, - :desc => 'DB name to connect' + type: :string, + default: nil, + desc: 'DB name to connect' method_option :database_credentials, - :type => :array, - :default => nil, - :desc => 'DB credentials ' + type: :array, + default: nil, + desc: 'DB credentials ' method_option :data_delimiter, - :type => :string, - :default => "|", - :desc => 'Data delimiter' + type: :string, + default: '|', + desc: 'Data delimiter' method_option :database_host, - :type => :string, - :default => nil, - :desc => 'Database Host name' + type: :string, + default: nil, + desc: 'Database Host name' method_option :database_port, - :type => :string, - :default => nil, - :desc => 'Database port' + type: :string, + default: nil, + desc: 'Database port' desc 'account', 'export/import accounts' def account - logger.info 'Please wait processing the request!!!' - begin - config_file = nil - if options[:killbill_url] && /https?:\/\/[\S]+/.match(options[:killbill_url]).nil? - raise Interrupt,'--killbill_url, required format -> http(s)://something' - end - - if options[:killbill_api_credentials] && options[:killbill_api_credentials].size != 2 - raise Interrupt,'--killbill_api_credentials, required format -> ' - end + config_file = nil + raise Interrupt, '--killbill_url, required format -> http(s)://something' if options[:killbill_url] && %r{https?://[\S]+}.match(options[:killbill_url]).nil? 
- if options[:killbill_credentials] && options[:killbill_credentials].size != 2 - raise Interrupt,'--killbill_credentials, required format -> ' - end + raise Interrupt, '--killbill_api_credentials, required format -> ' if options[:killbill_api_credentials] && options[:killbill_api_credentials].size != 2 - if options[:database_credentials] && options[:database_credentials].size != 2 - raise Interrupt,'--database_credentials, required format -> ' - end + raise Interrupt, '--killbill_credentials, required format -> ' if options[:killbill_credentials] && options[:killbill_credentials].size != 2 - if options[:database_name] && options[:database_name] == :database_name.to_s - raise Interrupt,'--database_credentials, please provide a valid database name' - end - - if options[:config_file] && options[:config_file] == :config_file.to_s - config_file = File.join(File.expand_path(File.dirname(__FILE__)), 'account_export_import.yml') - end + raise Interrupt, '--database_credentials, required format -> ' if options[:database_credentials] && options[:database_credentials].size != 2 - if options[:export].nil? && options[:import].nil? - raise Interrupt,'Need to specify an action' - end + raise Interrupt, '--database_credentials, please provide a valid database name' if options[:database_name] && options[:database_name] == :database_name.to_s + config_file = File.join(__dir__, 'account_export_import.yml') if options[:config_file] && options[:config_file] == :config_file.to_s - account = KPM::Account.new(config_file || options[:config_file],options[:killbill_api_credentials],options[:killbill_credentials], - options[:killbill_url],options[:database_name],options[:database_credentials],options[:database_host], options[:database_port],options[:data_delimiter], logger) - export_file = nil - round_trip_export_import = false + raise Interrupt, 'Need to specify an action' if options[:export].nil? && options[:import].nil? - if not options[:export].nil? 
- export_file = account.export_data(options[:export]) - round_trip_export_import = true - end + account = KPM::Account.new(config_file || options[:config_file], options[:killbill_api_credentials], options[:killbill_credentials], + options[:killbill_url], options[:database_name], options[:database_credentials], options[:database_host], options[:database_port], options[:data_delimiter], logger) + export_file = nil + round_trip_export_import = false - if not options[:import].nil? - account.import_data(export_file || options[:import],options[:tenant_record_id], options[:skip_payment_methods], - round_trip_export_import, options[:generate_record_id]) - end + unless options[:export].nil? + export_file = account.export_data(options[:export]) + round_trip_export_import = true + end - rescue Exception => e - logger.error "\e[91;1m#{e.message}\e[0m" - if not e.is_a?(Interrupt) - logger.error e.backtrace.join("\n") - end + unless options[:import].nil? + account.import_data(export_file || options[:import], options[:tenant_record_id], options[:skip_payment_methods], + round_trip_export_import, options[:generate_record_id]) end + rescue StandardError => e + logger.error "\e[91;1m#{e.message}\e[0m" + logger.error e.backtrace.join("\n") unless e.is_a?(Interrupt) end method_option :key_prefix, - :type => :string, - :default => nil, - :enum => KPM::TenantConfig::KEY_PREFIXES, - :desc => 'Retrieve a per tenant key value based on key prefix' + type: :string, + default: nil, + enum: KPM::TenantConfig::KEY_PREFIXES, + desc: 'Retrieve a per tenant key value based on key prefix' method_option :killbill_api_credentials, - :type => :array, - :default => nil, - :desc => 'Killbill api credentials ' + type: :array, + default: nil, + desc: 'Killbill api credentials ' method_option :killbill_credentials, - :type => :array, - :default => nil, - :desc => 'Killbill credentials ' + type: :array, + default: nil, + desc: 'Killbill credentials ' method_option :killbill_url, - :type => :string, - 
:default => nil, - :desc => 'Killbill URL ex. http://127.0.0.1:8080' + type: :string, + default: nil, + desc: 'Killbill URL ex. http://127.0.0.1:8080' desc 'tenant_config', 'export all tenant-level configs.' def tenant_config - logger.info 'Please wait processing the request!!!' - begin + raise Interrupt, '--killbill_url, required format -> http(s)://something' if options[:killbill_url] && %r{https?://[\S]+}.match(options[:killbill_url]).nil? - if options[:killbill_url] && /https?:\/\/[\S]+/.match(options[:killbill_url]).nil? - raise Interrupt,'--killbill_url, required format -> http(s)://something' - end - - if options[:killbill_api_credentials] && options[:killbill_api_credentials].size != 2 - raise Interrupt,'--killbill_api_credentials, required format -> ' - end - - if options[:killbill_credentials] && options[:killbill_credentials].size != 2 - raise Interrupt,'--killbill_credentials, required format -> ' - end + raise Interrupt, '--killbill_api_credentials, required format -> ' if options[:killbill_api_credentials] && options[:killbill_api_credentials].size != 2 - if options[:key_prefix] === :key_prefix.to_s - raise Interrupt, "--key_prefix, posible values #{KPM::TenantConfig::KEY_PREFIXES.join(', ')}" - end + raise Interrupt, '--killbill_credentials, required format -> ' if options[:killbill_credentials] && options[:killbill_credentials].size != 2 - tenantConfig = KPM::TenantConfig.new(options[:killbill_api_credentials],options[:killbill_credentials], - options[:killbill_url], logger) + raise Interrupt, "--key_prefix, posible values #{KPM::TenantConfig::KEY_PREFIXES.join(', ')}" if options[:key_prefix] == :key_prefix.to_s - tenantConfig.export(options[:key_prefix]) + tenant_config = KPM::TenantConfig.new(options[:killbill_api_credentials], options[:killbill_credentials], + options[:killbill_url], logger) - rescue Exception => e - logger.error "\e[91;1m#{e.message}\e[0m" - if not e.is_a?(Interrupt) - logger.error e.backtrace.join("\n") - end - end + 
tenant_config.export(options[:key_prefix]) + rescue StandardError => e + logger.error "\e[91;1m#{e.message}\e[0m" + logger.error e.backtrace.join("\n") unless e.is_a?(Interrupt) end - method_option :account_export, - :type => :string, - :default => nil, - :desc => 'export account for a provided id.' + type: :string, + default: nil, + desc: 'export account for a provided id.' method_option :log_dir, - :type => :string, - :default => nil, - :desc => '(Optional) Log directory if the default tomcat location has changed' + type: :string, + default: nil, + desc: '(Optional) Log directory if the default tomcat location has changed' method_option :config_file, - :type => :string, - :default => nil, - :desc => 'Yml that contains killbill api connection and DB connection' + type: :string, + default: nil, + desc: 'Yml that contains killbill api connection and DB connection' method_option :killbill_api_credentials, - :type => :array, - :default => nil, - :desc => 'Killbill api credentials ' + type: :array, + default: nil, + desc: 'Killbill api credentials ' method_option :killbill_credentials, - :type => :array, - :default => nil, - :desc => 'Killbill credentials ' + type: :array, + default: nil, + desc: 'Killbill credentials ' method_option :killbill_url, - :type => :string, - :default => nil, - :desc => 'Killbill URL ex. http://127.0.0.1:8080' + type: :string, + default: nil, + desc: 'Killbill URL ex. 
http://127.0.0.1:8080' method_option :database_name, - :type => :string, - :default => nil, - :desc => 'DB name to connect' + type: :string, + default: nil, + desc: 'DB name to connect' method_option :database_credentials, - :type => :array, - :default => nil, - :desc => 'DB credentials ' + type: :array, + default: nil, + desc: 'DB credentials ' method_option :database_host, - :type => :string, - :default => nil, - :desc => 'Database Host name' + type: :string, + default: nil, + desc: 'Database Host name' method_option :database_port, - :type => :string, - :default => nil, - :desc => 'Database port' + type: :string, + default: nil, + desc: 'Database port' method_option :kaui_web_path, - :type => :string, - :default => nil, - :desc => 'Path for the KAUI web app' + type: :string, + default: nil, + desc: 'Path for the KAUI web app' method_option :killbill_web_path, - :type => :string, - :default => nil, - :desc => 'Path for the killbill web app' + type: :string, + default: nil, + desc: 'Path for the killbill web app' method_option :bundles_dir, - :type => :string, - :default => nil, - :desc => 'A different folder other than the default bundles directory.' + type: :string, + default: nil, + desc: 'A different folder other than the default bundles directory.' desc 'diagnostic', 'exports and \'zips\' the account data, system, logs and tenant configurations' def diagnostic - logger.info 'Please wait processing the request!!!' - begin - if options[:account_export] && options[:account_export] == 'account_export' - raise Interrupt,'--account_export, please provide a valid account id' - end + raise Interrupt, '--account_export, please provide a valid account id' if options[:account_export] && options[:account_export] == 'account_export' - if options[:killbill_url] && /https?:\/\/[\S]+/.match(options[:killbill_url]).nil? 
- raise Interrupt,'--killbill_url, required format -> http(s)://something' - end + raise Interrupt, '--killbill_url, required format -> http(s)://something' if options[:killbill_url] && %r{https?://[\S]+}.match(options[:killbill_url]).nil? - if options[:killbill_api_credentials] && options[:killbill_api_credentials].size != 2 - raise Interrupt,'--killbill_api_credentials, required format -> ' - end + raise Interrupt, '--killbill_api_credentials, required format -> ' if options[:killbill_api_credentials] && options[:killbill_api_credentials].size != 2 - if options[:killbill_credentials] && options[:killbill_credentials].size != 2 - raise Interrupt,'--killbill_credentials, required format -> ' - end - - if options[:database_credentials] && options[:database_credentials].size != 2 - raise Interrupt,'--database_credentials, required format -> ' - end + raise Interrupt, '--killbill_credentials, required format -> ' if options[:killbill_credentials] && options[:killbill_credentials].size != 2 - if options[:database_name] && options[:database_name] == :database_name.to_s - raise Interrupt,'--database_credentials, please provide a valid database name' - end + raise Interrupt, '--database_credentials, required format -> ' if options[:database_credentials] && options[:database_credentials].size != 2 - if options[:kaui_web_path] && options[:kaui_web_path] == :kaui_web_path.to_s - raise Interrupt,'--kaui_web_path, please provide a valid kaui web path ' - end + raise Interrupt, '--database_credentials, please provide a valid database name' if options[:database_name] && options[:database_name] == :database_name.to_s - if options[:killbill_web_path] && options[:killbill_web_path] == :killbill_web_path.to_s - raise Interrupt,'--killbill_web_path, please provide a valid killbill web path' - end + raise Interrupt, '--kaui_web_path, please provide a valid kaui web path ' if options[:kaui_web_path] && options[:kaui_web_path] == :kaui_web_path.to_s - diagnostic = 
KPM::DiagnosticFile.new(options[:config_file],options[:killbill_api_credentials],options[:killbill_credentials], - options[:killbill_url],options[:database_name],options[:database_credentials], - options[:database_host], options[:database_port], options[:kaui_web_path], options[:killbill_web_path], options[:bundles_dir],logger) - diagnostic.export_data(options[:account_export], options[:log_dir]) + raise Interrupt, '--killbill_web_path, please provide a valid killbill web path' if options[:killbill_web_path] && options[:killbill_web_path] == :killbill_web_path.to_s - rescue Exception => e - logger.error "\e[91;1m#{e.message}\e[0m" - if not e.is_a?(Interrupt) - logger.error e.backtrace.join("\n") - end - end + diagnostic = KPM::DiagnosticFile.new(options[:config_file], options[:killbill_api_credentials], options[:killbill_credentials], + options[:killbill_url], options[:database_name], options[:database_credentials], + options[:database_host], options[:database_port], options[:kaui_web_path], options[:killbill_web_path], options[:bundles_dir], logger) + diagnostic.export_data(options[:account_export], options[:log_dir]) + rescue StandardError => e + logger.error "\e[91;1m#{e.message}\e[0m" + logger.error e.backtrace.join("\n") unless e.is_a?(Interrupt) end - map :pull_ruby_plugin => :install_ruby_plugin, - :pull_java_plugin => :install_java_plugin + map pull_ruby_plugin: :install_ruby_plugin, + pull_java_plugin: :install_java_plugin private def logger logger = ::Logger.new(STDOUT) - logger.level = Logger::INFO + logger.level = ENV['KPM_DEBUG'] ? 
Logger::DEBUG : Logger::INFO logger end end diff --git a/kpm/lib/kpm/tenant_config.rb b/kpm/lib/kpm/tenant_config.rb index 94f2867a..dd988e91 100644 --- a/kpm/lib/kpm/tenant_config.rb +++ b/kpm/lib/kpm/tenant_config.rb @@ -1,14 +1,14 @@ +# frozen_string_literal: true + require 'tmpdir' require 'json' require 'killbill_client' module KPM - class TenantConfig # Killbill server KILLBILL_HOST = ENV['KILLBILL_HOST'] || '127.0.0.1' - KILLBILL_URL = 'http://'.concat(KILLBILL_HOST).concat(':8080') - KILLBILL_API_VERSION = '1.0' + KILLBILL_URL = "http://#{KILLBILL_HOST}:8080" # USER/PWD KILLBILL_USER = ENV['KILLBILL_USER'] || 'admin' @@ -20,112 +20,97 @@ class TenantConfig # Temporary directory TMP_DIR_PEFIX = 'killbill' - TMP_DIR = Dir.mktmpdir(TMP_DIR_PEFIX); - - #Tenant key prefixes - KEY_PREFIXES = ['PLUGIN_CONFIG','PUSH_NOTIFICATION_CB','PER_TENANT_CONFIG', - 'PLUGIN_PAYMENT_STATE_MACHINE','CATALOG','OVERDUE_CONFIG', - 'INVOICE_TRANSLATION','CATALOG_TRANSLATION','INVOICE_TEMPLATE','INVOICE_MP_TEMPLATE'] - - + TMP_DIR = Dir.mktmpdir(TMP_DIR_PEFIX) + + # Tenant key prefixes + KEY_PREFIXES = %w[PLUGIN_CONFIG PUSH_NOTIFICATION_CB PER_TENANT_CONFIG + PLUGIN_PAYMENT_STATE_MACHINE CATALOG OVERDUE_CONFIG + INVOICE_TRANSLATION CATALOG_TRANSLATION INVOICE_TEMPLATE INVOICE_MP_TEMPLATE].freeze + def initialize(killbill_api_credentials = nil, killbill_credentials = nil, killbill_url = nil, logger = nil) @killbill_api_key = KILLBILL_API_KEY - @killbill_api_secrets = KILLBILL_API_SECRET + @killbill_api_secret = KILLBILL_API_SECRET @killbill_url = KILLBILL_URL @killbill_user = KILLBILL_USER @killbill_password = KILLBILL_PASSWORD @logger = logger - set_killbill_options(killbill_api_credentials,killbill_credentials,killbill_url) - + set_killbill_options(killbill_api_credentials, killbill_credentials, killbill_url) end - - def export(key_prefix = nil) + def export(key_prefix = nil) export_data = fetch_export_data(key_prefix) - - if export_data.size == 0 - raise Interrupt, 'key_prefix not 
found' - end - + + raise ArgumentError, "Data for key_prefix=#{key_prefix} not found" if export_data.empty? + export_file = store_into_file(export_data) - if not File.exist?(export_file) - raise Interrupt, 'key_prefix not found' - else - @logger.info "\e[32mData exported under #{export_file}\e[0m" - end + @logger.info "\e[32mData exported under #{export_file}\e[0m" export_file end - + private - - def fetch_export_data(key_prefix) - tenant_config = [] - pefixes = key_prefix.nil? ? KEY_PREFIXES : [key_prefix] - - pefixes.each do |prefix| - - config_data = call_client(prefix) - - if config_data.size > 0 - config_data.each {|data| tenant_config << data } - @logger.info "Data for key prefix \e[1m#{prefix.to_s}\e[0m was \e[1mfound and is ready to be exported\e[0m." - else - @logger.info "Data for key prefix \e[1m#{prefix.to_s}\e[0m was \e[31mnot found\e[0m." - end - end - - tenant_config - end - - def call_client(key_prefix) - - KillBillClient.url = @killbill_url - options = { - :username => @killbill_user, - :password => @killbill_password, - :api_key => @killbill_api_key, - :api_secret => @killbill_api_secrets - } - - tenant_config_data = KillBillClient::Model::Tenant.search_tenant_config(key_prefix, options) - - tenant_config_data - end - - def store_into_file(export_data) - export_file = TMP_DIR + File::SEPARATOR + 'kbdump' - File.open(export_file, 'w') { |io| io.puts export_data.to_json } + def fetch_export_data(key_prefix) + tenant_config = [] + pefixes = key_prefix.nil? ? KEY_PREFIXES : [key_prefix] - export_file + pefixes.each do |prefix| + config_data = call_client(prefix) + + if !config_data.empty? + config_data.each { |data| tenant_config << data } + @logger.debug "Data for key prefix \e[1m#{prefix}\e[0m was \e[1mfound and is ready to be exported\e[0m." + else + @logger.debug "Data for key prefix \e[1m#{prefix}\e[0m was \e[31mnot found\e[0m." 
+ end end - - def set_killbill_options(killbill_api_credentials, killbill_credentials, killbill_url) - if not killbill_api_credentials.nil? + tenant_config + end - @killbill_api_key = killbill_api_credentials[0] - @killbill_api_secrets = killbill_api_credentials[1] + def call_client(key_prefix) + KillBillClient.url = @killbill_url + KillBillClient.logger = @logger + options = { + username: @killbill_user, + password: @killbill_password, + api_key: @killbill_api_key, + api_secret: @killbill_api_secret + } + + begin + return KillBillClient::Model::Tenant.search_tenant_config(key_prefix, options) + rescue KillBillClient::API::Unauthorized + raise ArgumentError, "Unable to export tenant details, wrong credentials? username=#{@killbill_user}, password=#{mask(@killbill_password)}, api_key=#{@killbill_api_key}, api_secret=#{mask(@killbill_api_secret)}" + end + end - end + def store_into_file(export_data) + export_file = TMP_DIR + File::SEPARATOR + 'kbdump' - if not killbill_credentials.nil? + File.open(export_file, 'w') { |io| io.puts export_data.to_json } - @killbill_user = killbill_credentials[0] - @killbill_password = killbill_credentials[1] + export_file + end - end + def set_killbill_options(killbill_api_credentials, killbill_credentials, killbill_url) + unless killbill_api_credentials.nil? + @killbill_api_key = killbill_api_credentials[0] + @killbill_api_secret = killbill_api_credentials[1] + end - if not killbill_url.nil? + unless killbill_credentials.nil? + @killbill_user = killbill_credentials[0] + @killbill_password = killbill_credentials[1] + end - @killbill_url = killbill_url + @killbill_url = killbill_url unless killbill_url.nil? 
+ end - end - end - + def mask(string, all_but = 3, char = '*') + string.gsub(/.(?=.{#{all_but}})/, char) + end end - -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/tomcat_manager.rb b/kpm/lib/kpm/tomcat_manager.rb index 54a4ad7e..35b6d939 100644 --- a/kpm/lib/kpm/tomcat_manager.rb +++ b/kpm/lib/kpm/tomcat_manager.rb @@ -1,9 +1,10 @@ +# frozen_string_literal: true + require 'net/http' require 'uri' module KPM class TomcatManager - DOWNLOAD_URL = 'https://s3.amazonaws.com/kb-binaries/apache-tomcat-7.0.42.tar.gz' def initialize(tomcat_dir, logger) @@ -19,10 +20,10 @@ def download file = Pathname.new(dir).join('tomcat.tar.gz') @logger.info "Starting download of #{DOWNLOAD_URL} to #{file}" - Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http| - File.open(file, 'wb+') do |file| + Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http| + File.open(file, 'wb+') do |f| http.get(uri.path) do |body| - file.write(body) + f.write(body) end end end @@ -36,7 +37,7 @@ def download def setup # Remove default webapps - %w(ROOT docs examples host-manager manager).each do |webapp| + %w[ROOT docs examples host-manager manager].each do |webapp| FileUtils.rm_rf(@tomcat_dir.join('webapps').join(webapp)) end @@ -55,9 +56,9 @@ def setup def help "Tomcat installed at #{@tomcat_dir} -Start script: #{@tomcat_dir.join('bin').join('startup.sh').to_s} -Stop script: #{@tomcat_dir.join('bin').join('shutdown.sh').to_s} -Logs: #{@tomcat_dir.join('logs').to_s}" +Start script: #{@tomcat_dir.join('bin').join('startup.sh')} +Stop script: #{@tomcat_dir.join('bin').join('shutdown.sh')} +Logs: #{@tomcat_dir.join('logs')}" end private diff --git a/kpm/lib/kpm/trace_logger.rb b/kpm/lib/kpm/trace_logger.rb index a642d726..ab06ec69 100644 --- a/kpm/lib/kpm/trace_logger.rb +++ b/kpm/lib/kpm/trace_logger.rb @@ -1,13 +1,15 @@ +# frozen_string_literal: true + require 'json' module KPM class TraceLogger def initialize - @trace = Hash.new + @trace = 
{} end # Return JSON representation of the logs - def to_json + def to_json(*_args) @trace.to_json end @@ -21,32 +23,32 @@ def to_hash @trace end - def add(group=nil, key, message) - add_to_hash(group,key,message); + def add(group, key, message) + add_to_hash(group, key, message) end private - # This procedures will store the logs into a hash to be later returned - def add_to_hash(group=nil, key, message) + # This procedures will store the logs into a hash to be later returned + def add_to_hash(group, key, message) if group.nil? || key.nil? add_with_key(group || key, message) else container_key = group.to_sym - @trace[container_key] ||= Hash.new + @trace[container_key] ||= {} child_key = key.to_sym - unless @trace[container_key][child_key].nil? - child_is_an_array = @trace[container_key][child_key].kind_of?(Array) + if @trace[container_key][child_key].nil? + @trace[container_key][child_key] = message + else + child_is_an_array = @trace[container_key][child_key].is_a?(Array) old_message = nil old_message = @trace[container_key][child_key] unless child_is_an_array @trace[container_key][child_key] = [] unless child_is_an_array @trace[container_key][child_key].push(old_message) unless old_message.nil? @trace[container_key][child_key].push(message) - else - @trace[container_key][child_key] = message end end end @@ -54,17 +56,17 @@ def add_to_hash(group=nil, key, message) def add_with_key(key, message) child_key = key.to_sym - unless @trace[child_key].nil? - child_is_an_array = @trace[child_key].kind_of?(Array) + if @trace[child_key].nil? + @trace[child_key] = message + else + child_is_an_array = @trace[child_key].is_a?(Array) old_message = nil old_message = @trace[child_key] unless child_is_an_array @trace[child_key] = [] unless child_is_an_array @trace[child_key].push(old_message) unless old_message.nil? 
@trace[child_key].push(message) - else - @trace[child_key] = message end end end -end \ No newline at end of file +end diff --git a/kpm/lib/kpm/uninstaller.rb b/kpm/lib/kpm/uninstaller.rb index 2b649d8c..a53cf7b2 100644 --- a/kpm/lib/kpm/uninstaller.rb +++ b/kpm/lib/kpm/uninstaller.rb @@ -1,3 +1,7 @@ +# frozen_string_literal: true + +require 'pathname' + module KPM class Uninstaller def initialize(destination, logger = nil) @@ -7,21 +11,45 @@ def initialize(destination, logger = nil) @logger.level = Logger::INFO end - destination ||= KPM::BaseInstaller::DEFAULT_BUNDLES_DIR - @installed_plugins = Inspector.new.inspect(destination) + @destination = (destination || KPM::BaseInstaller::DEFAULT_BUNDLES_DIR) + refresh_installed_plugins - plugins_installation_path = File.join(destination, 'plugins') + plugins_installation_path = File.join(@destination, 'plugins') @plugins_manager = PluginsManager.new(plugins_installation_path, @logger) - sha1_file_path = File.join(destination, KPM::BaseInstaller::SHA1_FILENAME) + sha1_file_path = File.join(@destination, KPM::BaseInstaller::SHA1_FILENAME) @sha1checker = KPM::Sha1Checker.from_file(sha1_file_path, @logger) end - def uninstall_plugin(plugin, force = false) + def uninstall_plugin(plugin, force = false, version = nil) plugin_info = find_plugin(plugin) raise "No plugin with key/name '#{plugin}' found installed. Try running 'kpm inspect' for more info" unless plugin_info - remove_all_plugin_versions(plugin_info, force) + versions = version.nil? ? plugin_info[:versions].map { |artifact| artifact[:version] } : [version] + remove_plugin_versions(plugin_info, force, versions) + end + + def uninstall_non_default_plugins(dry_run = false) + plugins = categorize_plugins + + if plugins[:to_be_deleted].empty? 
+ KPM.ui.say 'Nothing to do' + return false + end + + if dry_run + msg = "The following plugin versions would be removed:\n" + msg += plugins[:to_be_deleted].map { |p| " #{p[0][:plugin_name]}: #{p[1]}" }.join("\n") + msg += "\nThe following plugin versions would be kept:\n" + msg += plugins[:to_keep].map { |p| " #{p[0][:plugin_name]}: #{p[1]}" }.join("\n") + KPM.ui.say msg + false + else + plugins[:to_be_deleted].each do |p| + remove_plugin_version(p[0], p[1]) + end + true + end end private @@ -40,23 +68,48 @@ def find_plugin(plugin) plugin_info end - def remove_all_plugin_versions(plugin_info, force = false) - versions = plugin_info[:versions].map { |artifact| artifact[:version] } + def categorize_plugins + plugins = { to_be_deleted: [], to_keep: [] } + @installed_plugins.each do |_, info| + info[:versions].each do |artifact| + (artifact[:is_default] ? plugins[:to_keep] : plugins[:to_be_deleted]) << [info, artifact[:version]] + end + end + plugins + end + + def remove_plugin_versions(plugin_info, force = false, versions = []) KPM.ui.say "Removing the following versions of the #{plugin_info[:plugin_name]} plugin: #{versions.join(', ')}" if !force && versions.length > 1 - return false unless 'y' == KPM.ui.ask('Are you sure you want to continue?', limited_to: %w(y n)) + return false unless KPM.ui.ask('Are you sure you want to continue?', limited_to: %w[y n]) == 'y' end - FileUtils.rmtree(plugin_info[:plugin_path]) - - @plugins_manager.remove_plugin_identifier_key(plugin_info[:plugin_key]) versions.each do |version| - remove_sha1_entry(plugin_info, version) + remove_plugin_version(plugin_info, version) end - true end + def remove_plugin_version(plugin_info, version) + # Be safe + raise ArgumentError, 'plugin_path is empty' if plugin_info[:plugin_path].empty? + raise ArgumentError, "version is empty (plugin_path=#{plugin_info[:plugin_path]})" if version.empty? 
+ + plugin_version_path = File.expand_path(File.join(plugin_info[:plugin_path], version)) + safe_rmrf(plugin_version_path) + + remove_sha1_entry(plugin_info, version) + + # Remove the identifier if this was the last version installed + refresh_installed_plugins + if @installed_plugins[plugin_info[:plugin_name]][:versions].empty? + safe_rmrf(plugin_info[:plugin_path]) + @plugins_manager.remove_plugin_identifier_key(plugin_info[:plugin_key]) + end + + refresh_installed_plugins + end + def remove_sha1_entry(plugin_info, version) coordinates = KPM::Coordinates.build_coordinates(group_id: plugin_info[:group_id], artifact_id: plugin_info[:artifact_id], @@ -65,5 +118,19 @@ def remove_sha1_entry(plugin_info, version) version: version) @sha1checker.remove_entry!(coordinates) end + + def refresh_installed_plugins + @installed_plugins = Inspector.new.inspect(@destination) + end + + def safe_rmrf(dir) + validate_dir_for_rmrf(dir) + FileUtils.rmtree(dir) + end + + def validate_dir_for_rmrf(dir) + raise ArgumentError, "Path #{dir} is not a valid directory" unless File.directory?(dir) + raise ArgumentError, "Path #{dir} is not a subdirectory of #{@destination}" unless Pathname.new(dir).fnmatch?(File.join(@destination, '**')) + end end end diff --git a/kpm/lib/kpm/utils.rb b/kpm/lib/kpm/utils.rb index d05789fe..5a49ecc8 100644 --- a/kpm/lib/kpm/utils.rb +++ b/kpm/lib/kpm/utils.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require 'pathname' require 'rubygems/package' require 'zlib' @@ -7,7 +9,7 @@ class Utils class << self TAR_LONGLINK = '././@LongLink' - def unpack_tgz(tar_gz_archive, destination, skip_top_dir=false) + def unpack_tgz(tar_gz_archive, destination, skip_top_dir = false) top_dir = nil Gem::Package::TarReader.new(Zlib::GzipReader.open(tar_gz_archive)) do |tar| dest = nil @@ -20,17 +22,17 @@ def unpack_tgz(tar_gz_archive, destination, skip_top_dir=false) if entry.directory? File.delete dest if File.file? 
dest - FileUtils.mkdir_p dest, :mode => entry.header.mode, :verbose => false + FileUtils.mkdir_p dest, mode: entry.header.mode, verbose: false elsif entry.file? FileUtils.rm_rf dest if File.directory? dest - FileUtils.mkdir_p File.dirname(dest), :verbose => false - File.open dest, "wb" do |f| + FileUtils.mkdir_p File.dirname(dest), verbose: false + File.open dest, 'wb' do |f| f.print entry.read end - FileUtils.chmod entry.header.mode, dest, :verbose => false + FileUtils.chmod entry.header.mode, dest, verbose: false current_dir = File.dirname(dest) # In case there are two top dirs, keep the last one by convention - top_dir = current_dir if (top_dir.nil? || top_dir.size >= current_dir.size) + top_dir = current_dir if top_dir.nil? || top_dir.size >= current_dir.size elsif entry.header.typeflag == '2' # Symlink File.symlink entry.header.linkname, dest end @@ -48,9 +50,7 @@ def peek_tgz_file_names(tar_gz_archive) file_names = [] Gem::Package::TarReader.new(Zlib::GzipReader.open(tar_gz_archive)) do |tar| tar.each do |entry| - if entry.file? - file_names.push entry.full_name - end + file_names.push entry.full_name if entry.file? end end @@ -62,17 +62,17 @@ def get_plugin_name_from_file_path(file_path) ver = get_version_from_file_path(file_path) ext = File.extname(base) - name = base.gsub(ext,'') + name = base.gsub(ext, '') if ver.nil? # this will remove SNAPSHOT and any dash that appear before it (ex --SNAPSHOT). - name = name.gsub(/((-+){,1}SNAPSHOT){,1}/,'') + name = name.gsub(/((-+){,1}SNAPSHOT){,1}/, '') last_dash = name.rindex('-') name = name[0..last_dash] unless last_dash.nil? else - name = name.gsub(ver,'') + name = name.gsub(ver, '') end - name = name[0..name.length-2] if name[-1].match(/[a-zA-z]/).nil? + name = name[0..name.length - 2] if name[-1].match(/[a-zA-z]/).nil? 
name end @@ -84,7 +84,6 @@ def get_version_from_file_path(file_path) ver[0] end - end end end diff --git a/kpm/lib/kpm/version.rb b/kpm/lib/kpm/version.rb index f64b3e99..e497fa96 100644 --- a/kpm/lib/kpm/version.rb +++ b/kpm/lib/kpm/version.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module KPM - VERSION = '0.6.5' + VERSION = '0.8.1' end diff --git a/kpm/packaging/Gemfile b/kpm/packaging/Gemfile index 8309d3b0..5b4a32cc 100644 --- a/kpm/packaging/Gemfile +++ b/kpm/packaging/Gemfile @@ -1,3 +1,5 @@ +# frozen_string_literal: true + source 'https://rubygems.org' gem 'kpm', '~> VERSION' diff --git a/kpm/pom.xml b/kpm/pom.xml index 848f037d..09fe7765 100644 --- a/kpm/pom.xml +++ b/kpm/pom.xml @@ -26,7 +26,7 @@ org.kill-bill.billing.installer kpm pom - 0.6.5 + 0.8.1 KPM http://github.com/killbill/killbill-cloud KPM: the Kill Bill Package Manager diff --git a/kpm/spec/kpm/remote/base_artifact_spec.rb b/kpm/spec/kpm/remote/base_artifact_spec.rb index 607a514f..36622d4a 100644 --- a/kpm/spec/kpm/remote/base_artifact_spec.rb +++ b/kpm/spec/kpm/remote/base_artifact_spec.rb @@ -1,7 +1,8 @@ +# frozen_string_literal: true + require 'spec_helper' describe KPM::BaseArtifact do - before(:all) do @logger = Logger.new(STDOUT) @logger.level = Logger::INFO @@ -25,6 +26,23 @@ end end + it 'should be able to handle download errors' do + nexus_down = { url: 'https://does.not.exist' } + Dir.mktmpdir do |dir| + sha1_file = "#{dir}/sha1.yml" + test_download dir, 'foo-oss.pom.xml', false, false, sha1_file + # Verify we skip the second time + test_download dir, 'foo-oss.pom.xml', true, false, sha1_file + # Verify the download is skipped gracefully when Nexus isn't reachable + test_download dir, 'foo-oss.pom.xml', true, false, sha1_file, nexus_down + # Verify the download fails when Nexus isn't reachable and force_download is set + expect { test_download dir, 'foo-oss.pom.xml', nil, true, sha1_file, nexus_down }.to raise_error + # Verify the download fails when Nexus isn't reachable 
and the Nexus cache is empty + KPM::Sha1Checker.from_file(sha1_file).cache_artifact_info('org.kill-bill.billing:killbill-oss-parent:pom:0.143.33', nil) + expect { test_download dir, 'foo-oss.pom.xml', nil, false, sha1_file, nexus_down }.to raise_error + end + end + it 'should be able to download and verify generic .tar.gz artifacts' do # The artifact is not small unfortunately (23.7M) group_id = 'org.kill-bill.billing' @@ -38,11 +56,11 @@ info[:file_name].should be_nil files_in_dir = Dir[info[:file_path] + '/*'] - files_in_dir.size.should == 20 + files_in_dir.size.should eq 20 File.file?(info[:file_path] + '/killbill-osgi-bundles-jruby-0.11.3.jar').should be_true - info[:bundle_dir].should == info[:file_path] + info[:bundle_dir].should eq info[:file_path] end end @@ -62,19 +80,15 @@ second_take = KPM::BaseArtifact.pull(@logger, group_id, artifact_id, packaging, classifier, second_bundle_version, dir) File.file?(first_take[:file_path] + '/killbill-platform-osgi-bundles-jruby-0.36.2.jar').should be_false File.file?(second_take[:file_path] + '/killbill-platform-osgi-bundles-jruby-0.36.10.jar').should be_true - end end - - def test_download(dir, filename=nil, verify_is_skipped=false, force_download=false) + def test_download(dir, filename = nil, verify_is_skipped = false, force_download = false, sha1_file = nil, overrides = {}) path = filename.nil? ? dir : dir + '/' + filename - info = KPM::BaseArtifact.pull(@logger, 'org.kill-bill.billing', 'killbill-oss-parent', 'pom', nil, 'LATEST', path, nil, force_download, true, {}, true) - info[:file_name].should == (filename.nil? ? "killbill-oss-parent-#{info[:version]}.pom" : filename) - info[:skipped].should == verify_is_skipped - if !info[:skipped] - info[:size].should == File.size(info[:file_path]) - end + info = KPM::BaseArtifact.pull(@logger, 'org.kill-bill.billing', 'killbill-oss-parent', 'pom', nil, '0.143.33', path, sha1_file, force_download, true, overrides, true) + info[:file_name].should eq(filename.nil? ? 
"killbill-oss-parent-#{info[:version]}.pom" : filename) + info[:skipped].should eq verify_is_skipped + info[:size].should eq File.size(info[:file_path]) unless info[:skipped] end end diff --git a/kpm/spec/kpm/remote/base_installer_spec.rb b/kpm/spec/kpm/remote/base_installer_spec.rb index 2b947e8a..344e7b32 100644 --- a/kpm/spec/kpm/remote/base_installer_spec.rb +++ b/kpm/spec/kpm/remote/base_installer_spec.rb @@ -1,8 +1,9 @@ +# frozen_string_literal: true + require 'spec_helper' require 'json' describe KPM::BaseInstaller do - before(:all) do @logger = Logger.new(STDOUT) @logger.level = Logger::INFO @@ -13,7 +14,7 @@ bundles_dir = dir + '/bundles' installer = KPM::BaseInstaller.new(@logger) - info = installer.install_plugin('analytics', nil, nil, nil, nil, nil, '0.7.1', bundles_dir) + installer.install_plugin('analytics', nil, nil, nil, nil, nil, '0.7.1', bundles_dir) check_installation(bundles_dir) @@ -46,23 +47,23 @@ begin installer.install_plugin('invalid', nil, nil, nil, nil, nil, '1.2.3', bundles_dir) - fail "Should not succeed to install invalid plugin" - rescue ArgumentError => e + raise 'Should not succeed to install invalid plugin' + rescue ArgumentError + # Expected end end end it 'should extract plugin name from file path' do - [ - {:file_path => '/Somewhere/xxx-foo/target/xxx-1.0.0.jar', :expected => 'xxx'}, - {:file_path => '/Somewhere/xxx-foo/target/xxx-foo-bar-1.0.0.jar', :expected => 'xxx-foo-bar'}, - {:file_path => '/Somewhere/xxx-foo/target/xxx-foo-1.0.0.jar', :expected => 'xxx-foo'}, - {:file_path => '/Somewhere/xxx-foo/target/xxx-foo-1.0.0-SNAPSHOT.jar', :expected => 'xxx-foo'}, - {:file_path => '/Somewhere/xxx-foo/target/xxx-foo-1.0.jar', :expected => 'xxx-foo'}, - {:file_path => '/Somewhere/xxx-foo/target/xxx-foo-1.jar', :expected => 'xxx-foo'}, - {:file_path => '/Somewhere/xxx-foo/target/xxx-foo-abc-SNAPSHOT.jar', :expected => 'xxx-foo'}, - {:file_path => '/Somewhere/xxx-foo/target/xxx-foo-abc.jar', :expected => 'xxx-foo'} + { file_path: 
'/Somewhere/xxx-foo/target/xxx-1.0.0.jar', expected: 'xxx' }, + { file_path: '/Somewhere/xxx-foo/target/xxx-foo-bar-1.0.0.jar', expected: 'xxx-foo-bar' }, + { file_path: '/Somewhere/xxx-foo/target/xxx-foo-1.0.0.jar', expected: 'xxx-foo' }, + { file_path: '/Somewhere/xxx-foo/target/xxx-foo-1.0.0-SNAPSHOT.jar', expected: 'xxx-foo' }, + { file_path: '/Somewhere/xxx-foo/target/xxx-foo-1.0.jar', expected: 'xxx-foo' }, + { file_path: '/Somewhere/xxx-foo/target/xxx-foo-1.jar', expected: 'xxx-foo' }, + { file_path: '/Somewhere/xxx-foo/target/xxx-foo-abc-SNAPSHOT.jar', expected: 'xxx-foo' }, + { file_path: '/Somewhere/xxx-foo/target/xxx-foo-abc.jar', expected: 'xxx-foo' } ].each do |test| KPM::Utils.get_plugin_name_from_file_path(test[:file_path]).should eq test[:expected] end @@ -75,14 +76,14 @@ def check_installation(plugins_dir) plugin_identifiers = read_plugin_identifiers(plugins_dir) - plugin_identifiers.size.should == 1 + plugin_identifiers.size.should eq 1 - plugin_identifiers['analytics']['plugin_name'].should == 'analytics-plugin' - plugin_identifiers['analytics']['group_id'].should == 'org.kill-bill.billing.plugin.java' - plugin_identifiers['analytics']['artifact_id'].should == 'analytics-plugin' - plugin_identifiers['analytics']['packaging'].should == 'jar' - plugin_identifiers['analytics']['version'].should == '0.7.1' - plugin_identifiers['analytics']['language'].should == 'java' + plugin_identifiers['analytics']['plugin_name'].should eq 'analytics-plugin' + plugin_identifiers['analytics']['group_id'].should eq 'org.kill-bill.billing.plugin.java' + plugin_identifiers['analytics']['artifact_id'].should eq 'analytics-plugin' + plugin_identifiers['analytics']['packaging'].should eq 'jar' + plugin_identifiers['analytics']['version'].should eq '0.7.1' + plugin_identifiers['analytics']['language'].should eq 'java' File.file?(plugins_dir + '/plugins/java/analytics-plugin/0.7.1/tmp/disabled.txt').should be_false end @@ -92,26 +93,26 @@ def 
check_uninstallation(plugins_dir) plugin_identifiers = read_plugin_identifiers(plugins_dir) - plugin_identifiers.size.should == 0 + plugin_identifiers.size.should eq 0 File.file?(plugins_dir + '/plugins/java/analytics-plugin/0.7.1/tmp/disabled.txt').should be_true end def common_checks(plugins_dir) [ - plugins_dir, - plugins_dir + '/plugins', - plugins_dir + '/plugins/java', - plugins_dir + '/plugins/java/analytics-plugin', - plugins_dir + '/plugins/java/analytics-plugin/0.7.1', - plugins_dir + '/plugins/java/analytics-plugin/0.7.1/tmp', + plugins_dir, + plugins_dir + '/plugins', + plugins_dir + '/plugins/java', + plugins_dir + '/plugins/java/analytics-plugin', + plugins_dir + '/plugins/java/analytics-plugin/0.7.1', + plugins_dir + '/plugins/java/analytics-plugin/0.7.1/tmp' ].each do |dir| File.directory?(dir).should be_true end [ - plugins_dir + '/plugins/plugin_identifiers.json', - plugins_dir + '/plugins/java/analytics-plugin/0.7.1/analytics-plugin-0.7.1.jar' + plugins_dir + '/plugins/plugin_identifiers.json', + plugins_dir + '/plugins/java/analytics-plugin/0.7.1/analytics-plugin-0.7.1.jar' ].each do |file| File.file?(file).should be_true end diff --git a/kpm/spec/kpm/remote/installer_spec.rb b/kpm/spec/kpm/remote/installer_spec.rb index 85d3e7f8..b7fbd18a 100644 --- a/kpm/spec/kpm/remote/installer_spec.rb +++ b/kpm/spec/kpm/remote/installer_spec.rb @@ -1,8 +1,9 @@ +# frozen_string_literal: true + require 'spec_helper' require 'json' describe KPM::Installer do - before(:all) do @logger = Logger.new(STDOUT) @logger.level = Logger::INFO @@ -12,15 +13,15 @@ Dir.mktmpdir do |dir| kb_webapp_path = dir + '/KB_ROOT.war' installer = KPM::Installer.new({ - 'killbill' => { - 'webapp_path' => kb_webapp_path - } + 'killbill' => { + 'webapp_path' => kb_webapp_path + } }, @logger) # No exception response = nil - expect{ response = installer.install }.to_not raise_exception + expect { response = installer.install }.to_not raise_exception response = JSON[response] 
response['help'].should be_nil response['killbill']['status'].should eq 'INSTALLED' @@ -31,15 +32,15 @@ Dir.mktmpdir do |dir| kaui_webapp_path = dir + '/KAUI_ROOT.war' installer = KPM::Installer.new({ - 'kaui' => { - 'webapp_path' => kaui_webapp_path - } + 'kaui' => { + 'webapp_path' => kaui_webapp_path + } }, @logger) # No exception response = nil - expect{ response = installer.install }.to_not raise_exception + expect { response = installer.install }.to_not raise_exception response = JSON[response] response['help'].should be_nil response['kaui']['status'].should eq 'INSTALLED' @@ -52,28 +53,28 @@ kaui_webapp_path = dir + '/KAUI_ROOT.war' plugins_dir = dir + '/bundles' installer = KPM::Installer.new({ - 'killbill' => { - 'webapp_path' => kb_webapp_path, - 'plugins_dir' => plugins_dir, - 'plugins' => { - 'java' => [{ - 'name' => 'analytics', - 'version' => '0.7.1' - }], - 'ruby' => [{ - 'name' => 'payment-test-plugin', - 'artifact_id' => 'payment-test-plugin', - 'group_id' => 'org.kill-bill.billing.plugin.ruby', - 'version' => '1.8.7' - }, - { - 'name' => 'stripe' - }] - } - }, - 'kaui' => { - 'webapp_path' => kaui_webapp_path + 'killbill' => { + 'webapp_path' => kb_webapp_path, + 'plugins_dir' => plugins_dir, + 'plugins' => { + 'java' => [{ + 'name' => 'analytics', + 'version' => '0.7.1' + }, { + 'name' => 'stripe', + 'version' => '7.0.0' + }], + 'ruby' => [{ + 'name' => 'payment-test-plugin', + 'artifact_id' => 'payment-test-plugin', + 'group_id' => 'org.kill-bill.billing.plugin.ruby', + 'version' => '1.8.7' + }] } + }, + 'kaui' => { + 'webapp_path' => kaui_webapp_path + } }, @logger) @@ -86,11 +87,10 @@ # Finally verify that for both (well behaved) ruby and java plugin, skipping the install will still correctly return the `:bundle_dir` info = installer.install_plugin('payment-test-plugin', nil, 'org.kill-bill.billing.plugin.ruby', 'payment-test-plugin', nil, nil, '1.8.7', plugins_dir) - info[:bundle_dir].should == plugins_dir + 
'/plugins/ruby/killbill-payment-test/1.8.7' - + info[:bundle_dir].should eq plugins_dir + '/plugins/ruby/killbill-payment-test/1.8.7' - info = installer.install_plugin('analytics', nil, nil, nil, nil, nil, '0.7.1', plugins_dir) - info[:bundle_dir].should == plugins_dir + '/plugins/java/analytics-plugin/0.7.1' + info = installer.install_plugin('analytics', nil, nil, nil, nil, nil, '0.7.1', plugins_dir) + info[:bundle_dir].should eq plugins_dir + '/plugins/java/analytics-plugin/0.7.1' end end @@ -98,27 +98,29 @@ def check_installation(plugins_dir, kb_webapp_path, kaui_webapp_path) [ - plugins_dir, - plugins_dir + '/platform', - plugins_dir + '/plugins', - plugins_dir + '/plugins/java', - plugins_dir + '/plugins/java/analytics-plugin', - plugins_dir + '/plugins/java/analytics-plugin/0.7.1', - plugins_dir + '/plugins/ruby', - plugins_dir + '/plugins/ruby/killbill-payment-test', - plugins_dir + '/plugins/ruby/killbill-payment-test/1.8.7', - plugins_dir + '/plugins/ruby/killbill-stripe' + plugins_dir, + plugins_dir + '/platform', + plugins_dir + '/plugins', + plugins_dir + '/plugins/java', + plugins_dir + '/plugins/java/analytics-plugin', + plugins_dir + '/plugins/java/analytics-plugin/0.7.1', + plugins_dir + '/plugins/java/stripe-plugin', + plugins_dir + '/plugins/java/stripe-plugin/7.0.0', + plugins_dir + '/plugins/ruby', + plugins_dir + '/plugins/ruby/killbill-payment-test', + plugins_dir + '/plugins/ruby/killbill-payment-test/1.8.7' ].each do |dir| File.directory?(dir).should be_true end [ - kb_webapp_path, - kaui_webapp_path, - plugins_dir + '/platform/jruby.jar', - plugins_dir + '/plugins/plugin_identifiers.json', - plugins_dir + '/plugins/java/analytics-plugin/0.7.1/analytics-plugin-0.7.1.jar', - plugins_dir + '/plugins/ruby/killbill-payment-test/1.8.7/killbill.properties' + kb_webapp_path, + kaui_webapp_path, + plugins_dir + '/platform/jruby.jar', + plugins_dir + '/plugins/plugin_identifiers.json', + plugins_dir + 
'/plugins/java/analytics-plugin/0.7.1/analytics-plugin-0.7.1.jar', + plugins_dir + '/plugins/java/stripe-plugin/7.0.0/stripe-plugin-7.0.0.jar', + plugins_dir + '/plugins/ruby/killbill-payment-test/1.8.7/killbill.properties' ].each do |file| File.file?(file).should be_true end @@ -127,27 +129,27 @@ def check_installation(plugins_dir, kb_webapp_path, kaui_webapp_path) JSON.parse(f.read) end - plugin_identifiers.size.should == 3 - - plugin_identifiers['analytics']['plugin_name'].should == 'analytics-plugin' - plugin_identifiers['analytics']['group_id'].should == 'org.kill-bill.billing.plugin.java' - plugin_identifiers['analytics']['artifact_id'].should == 'analytics-plugin' - plugin_identifiers['analytics']['packaging'].should == 'jar' - plugin_identifiers['analytics']['version'].should == '0.7.1' - plugin_identifiers['analytics']['language'].should == 'java' - - plugin_identifiers['payment-test-plugin']['plugin_name'].should == 'killbill-payment-test' - plugin_identifiers['payment-test-plugin']['group_id'].should == 'org.kill-bill.billing.plugin.ruby' - plugin_identifiers['payment-test-plugin']['artifact_id'].should == 'payment-test-plugin' - plugin_identifiers['payment-test-plugin']['packaging'].should == 'tar.gz' - plugin_identifiers['payment-test-plugin']['version'].should == '1.8.7' - plugin_identifiers['payment-test-plugin']['language'].should == 'ruby' - - plugin_identifiers['stripe']['plugin_name'].should == 'killbill-stripe' - plugin_identifiers['stripe']['group_id'].should == 'org.kill-bill.billing.plugin.ruby' - plugin_identifiers['stripe']['artifact_id'].should == 'stripe-plugin' - plugin_identifiers['stripe']['packaging'].should == 'tar.gz' - plugin_identifiers['stripe']['version'].should >= '4.0.0' - plugin_identifiers['stripe']['language'].should == 'ruby' + plugin_identifiers.size.should eq 3 + + plugin_identifiers['analytics']['plugin_name'].should eq 'analytics-plugin' + plugin_identifiers['analytics']['group_id'].should eq 
'org.kill-bill.billing.plugin.java' + plugin_identifiers['analytics']['artifact_id'].should eq 'analytics-plugin' + plugin_identifiers['analytics']['packaging'].should eq 'jar' + plugin_identifiers['analytics']['version'].should eq '0.7.1' + plugin_identifiers['analytics']['language'].should eq 'java' + + plugin_identifiers['stripe']['plugin_name'].should eq 'stripe-plugin' + plugin_identifiers['stripe']['group_id'].should eq 'org.kill-bill.billing.plugin.java' + plugin_identifiers['stripe']['artifact_id'].should eq 'stripe-plugin' + plugin_identifiers['stripe']['packaging'].should eq 'jar' + plugin_identifiers['stripe']['version'].should eq '7.0.0' + plugin_identifiers['stripe']['language'].should eq 'java' + + plugin_identifiers['payment-test-plugin']['plugin_name'].should eq 'killbill-payment-test' + plugin_identifiers['payment-test-plugin']['group_id'].should eq 'org.kill-bill.billing.plugin.ruby' + plugin_identifiers['payment-test-plugin']['artifact_id'].should eq 'payment-test-plugin' + plugin_identifiers['payment-test-plugin']['packaging'].should eq 'tar.gz' + plugin_identifiers['payment-test-plugin']['version'].should eq '1.8.7' + plugin_identifiers['payment-test-plugin']['language'].should eq 'ruby' end end diff --git a/kpm/spec/kpm/remote/kaui_artifact_spec.rb b/kpm/spec/kpm/remote/kaui_artifact_spec.rb index 233d0fa3..cc6a3485 100644 --- a/kpm/spec/kpm/remote/kaui_artifact_spec.rb +++ b/kpm/spec/kpm/remote/kaui_artifact_spec.rb @@ -1,7 +1,8 @@ +# frozen_string_literal: true + require 'spec_helper' describe KPM::KillbillPluginArtifact do - before(:all) do @logger = Logger.new(STDOUT) @logger.level = Logger::INFO @@ -16,15 +17,15 @@ KPM::BaseArtifact::KAUI_CLASSIFIER, 'LATEST', dir) - info[:file_name].should == "kaui-standalone-#{info[:version]}.war" - info[:size].should == File.size(info[:file_path]) + info[:file_name].should eq "kaui-standalone-#{info[:version]}.war" + info[:size].should eq File.size(info[:file_path]) end end it 'should be able to list 
versions' do versions = KPM::KauiArtifact.versions.to_a - versions.size.should >= 2 - versions[0].should == '0.0.1' - versions[1].should == '0.0.2' + expect(versions.size).to be >= 2 + versions[0].should eq '0.0.1' + versions[1].should eq '0.0.2' end end diff --git a/kpm/spec/kpm/remote/killbill_plugin_artifact_spec.rb b/kpm/spec/kpm/remote/killbill_plugin_artifact_spec.rb index 0071b1e7..dde854db 100644 --- a/kpm/spec/kpm/remote/killbill_plugin_artifact_spec.rb +++ b/kpm/spec/kpm/remote/killbill_plugin_artifact_spec.rb @@ -1,7 +1,8 @@ +# frozen_string_literal: true + require 'spec_helper' describe KPM::KillbillPluginArtifact do - before(:all) do @logger = Logger.new(STDOUT) @logger.level = Logger::INFO @@ -22,12 +23,12 @@ info[:file_name].should be_nil files_in_dir = Dir[info[:file_path] + '/*'] - files_in_dir.size.should == 1 - files_in_dir[0].should == info[:file_path] + '/killbill-payment-test' + files_in_dir.size.should eq 1 + files_in_dir[0].should eq info[:file_path] + '/killbill-payment-test' - File.read(info[:file_path] + '/killbill-payment-test/1.8.7/killbill.properties').should == "mainClass=PaymentTest::PaymentPlugin\nrequire=payment_test\npluginType=PAYMENT\n" + File.read(info[:file_path] + '/killbill-payment-test/1.8.7/killbill.properties').should eq "mainClass=PaymentTest::PaymentPlugin\nrequire=payment_test\npluginType=PAYMENT\n" - info[:bundle_dir].should == info[:file_path] + '/killbill-payment-test/1.8.7' + info[:bundle_dir].should eq info[:file_path] + '/killbill-payment-test/1.8.7' end end @@ -43,8 +44,8 @@ 'killbill-analytics', dir, sha1_file) - info[:file_name].should == "analytics-plugin-#{info[:version]}.jar" - info[:size].should == File.size(info[:file_path]) + info[:file_name].should eq "analytics-plugin-#{info[:version]}.jar" + info[:size].should eq File.size(info[:file_path]) check_yaml_for_resolved_latest_version(sha1_file, 'org.kill-bill.billing.plugin.java:analytics-plugin:jar', '3.0.0') end @@ -66,9 +67,6 @@ 
check_yaml_for_resolved_latest_version(sha1_file, 'org.kill-bill.billing.plugin.ruby:logging-plugin:tar.gz', '3.0.0') end - - - end it 'should be able to list versions' do @@ -77,16 +75,16 @@ versions[:java].should_not be_nil versions[:java]['analytics-plugin'].should_not be_nil logging_plugin_versions = versions[:java]['analytics-plugin'].to_a - logging_plugin_versions.size.should >= 3 - logging_plugin_versions[0].should == '0.6.0' - logging_plugin_versions[1].should == '0.7.0' - logging_plugin_versions[2].should == '0.7.1' + expect(logging_plugin_versions.size).to be >= 3 + logging_plugin_versions[0].should eq '0.6.0' + logging_plugin_versions[1].should eq '0.7.0' + logging_plugin_versions[2].should eq '0.7.1' versions[:ruby].should_not be_nil versions[:ruby]['logging-plugin'].should_not be_nil logging_plugin_versions = versions[:ruby]['logging-plugin'].to_a - logging_plugin_versions.size.should >= 1 - logging_plugin_versions[0].should == '1.7.0' + expect(logging_plugin_versions.size).to be >= 1 + logging_plugin_versions[0].should eq '1.7.0' end private @@ -95,17 +93,14 @@ # (we can't check against actual version because as we keep releasing those increment, # so the best we can do it check this is *not* LATEST and greater than current version at the time the test was written ) def check_yaml_for_resolved_latest_version(sha1_file, key_prefix, minimum_version) - sha1_checker = KPM::Sha1Checker.from_file(sha1_file) - keys = sha1_checker.all_sha1.keys.select { |k| k.start_with? key_prefix} - keys.size.should == 1 + keys = sha1_checker.all_sha1.keys.select { |k| k.start_with? 
key_prefix } + keys.size.should eq 1 parts = keys[0].split(':') - parts.size.should == 4 - parts[3].should_not == 'LATEST' - parts[3].should >= minimum_version + parts.size.should eq 4 + parts[3].should_not eq 'LATEST' + expect(parts[3]).to be >= minimum_version end - - end diff --git a/kpm/spec/kpm/remote/killbill_server_artifact_spec.rb b/kpm/spec/kpm/remote/killbill_server_artifact_spec.rb index c2d00d7e..6697f5df 100644 --- a/kpm/spec/kpm/remote/killbill_server_artifact_spec.rb +++ b/kpm/spec/kpm/remote/killbill_server_artifact_spec.rb @@ -1,7 +1,8 @@ +# frozen_string_literal: true + require 'spec_helper' describe KPM::KillbillServerArtifact do - before(:all) do @logger = Logger.new(STDOUT) @logger.level = Logger::INFO @@ -17,25 +18,41 @@ KPM::BaseArtifact::KILLBILL_CLASSIFIER, 'LATEST', dir) - info[:file_name].should == "killbill-profiles-killbill-#{info[:version]}.war" - info[:size].should == File.size(info[:file_path]) + info[:file_name].should eq "killbill-profiles-killbill-#{info[:version]}.war" + info[:size].should eq File.size(info[:file_path]) end end it 'should be able to list versions' do versions = KPM::KillbillServerArtifact.versions(KPM::BaseArtifact::KILLBILL_ARTIFACT_ID).to_a - versions.size.should >= 2 - versions[0].should == '0.11.10' - versions[1].should == '0.11.11' + expect(versions.size).to be >= 2 + versions[0].should eq '0.11.10' + versions[1].should eq '0.11.11' end it 'should get dependencies information' do - info = KPM::KillbillServerArtifact.info('0.15.9') - info['killbill'].should == '0.15.9' - info['killbill-oss-parent'].should == '0.62' - info['killbill-api'].should == '0.27' - info['killbill-plugin-api'].should == '0.16' - info['killbill-commons'].should == '0.10' - info['killbill-platform'].should == '0.13' + nexus_down = { url: 'https://does.not.exist' } + + Dir.mktmpdir do |dir| + sha1_file = "#{dir}/sha1.yml" + info = KPM::KillbillServerArtifact.info('0.15.9', sha1_file) + info['killbill'].should eq '0.15.9' + 
info['killbill-oss-parent'].should eq '0.62' + info['killbill-api'].should eq '0.27' + info['killbill-plugin-api'].should eq '0.16' + info['killbill-commons'].should eq '0.10' + info['killbill-platform'].should eq '0.13' + KPM::Sha1Checker.from_file(sha1_file).killbill_info('0.15.9').should eq info + + # Verify the download is skipped gracefully when Nexus isn't reachable + KPM::KillbillServerArtifact.info('0.15.9', sha1_file, false, nil, nexus_down) + + # Verify the download fails when Nexus isn't reachable and force_download is set + expect { KPM::KillbillServerArtifact.info('0.15.9', sha1_file, true, nil, nexus_down) }.to raise_error + + # Verify the download fails when Nexus isn't reachable and the Nexus cache is empty + KPM::Sha1Checker.from_file(sha1_file).cache_killbill_info('0.15.9', nil) + expect { KPM::KillbillServerArtifact.info('0.15.9', sha1_file, false, nil, nexus_down) }.to raise_error + end end end diff --git a/kpm/spec/kpm/remote/migrations_spec.rb b/kpm/spec/kpm/remote/migrations_spec.rb index 548d970c..06c9de9e 100644 --- a/kpm/spec/kpm/remote/migrations_spec.rb +++ b/kpm/spec/kpm/remote/migrations_spec.rb @@ -1,18 +1,19 @@ -require 'spec_helper' +# frozen_string_literal: true -describe KPM::Migrations, :skip_me_if_nil => ENV['TOKEN'].nil? do +require 'spec_helper' +describe KPM::Migrations, skip_me_if_nil: ENV['TOKEN'].nil? 
do context 'plugins' do it 'should be able to find migrations for a java plugin' do migrations = KPM::Migrations.new('analytics-plugin-3.0.2', nil, 'killbill/killbill-analytics-plugin', ENV['TOKEN']).migrations # No migration yet - migrations.size.should == 0 + migrations.size.should eq 0 end it 'should be able to find migrations for a ruby plugin' do migrations = KPM::Migrations.new('master', nil, 'killbill/killbill-cybersource-plugin', ENV['TOKEN']).migrations # No migration yet - migrations.size.should == 1 + migrations.size.should eq 1 end end @@ -20,19 +21,19 @@ it 'should be able to find migrations between two versions' do migrations = KPM::Migrations.new('killbill-0.16.3', 'killbill-0.16.4', 'killbill/killbill', ENV['TOKEN']).migrations - migrations.size.should == 1 - migrations.first[:name].should == 'V20160324060345__revisit_payment_methods_indexes_509.sql' - migrations.first[:sql].should == "drop index payment_methods_active_accnt on payment_methods;\n" + migrations.size.should eq 1 + migrations.first[:name].should eq 'V20160324060345__revisit_payment_methods_indexes_509.sql' + migrations.first[:sql].should eq "drop index payment_methods_active_accnt on payment_methods;\n" - KPM::Migrations.new('master', 'master', 'killbill/killbill', ENV['TOKEN']).migrations.size.should == 0 + KPM::Migrations.new('master', 'master', 'killbill/killbill', ENV['TOKEN']).migrations.size.should eq 0 end it 'should be able to find migrations for a given version' do migrations = KPM::Migrations.new('killbill-0.16.4', nil, 'killbill/killbill', ENV['TOKEN']).migrations - migrations.size.should == 1 - migrations.first[:name].should == 'V20160324060345__revisit_payment_methods_indexes_509.sql' - migrations.first[:sql].should == "drop index payment_methods_active_accnt on payment_methods;\n" + migrations.size.should eq 1 + migrations.first[:name].should eq 'V20160324060345__revisit_payment_methods_indexes_509.sql' + migrations.first[:sql].should eq "drop index 
payment_methods_active_accnt on payment_methods;\n" end end end diff --git a/kpm/spec/kpm/remote/nexus_facade_spec.rb b/kpm/spec/kpm/remote/nexus_facade_spec.rb index 594b6e34..febd60cb 100644 --- a/kpm/spec/kpm/remote/nexus_facade_spec.rb +++ b/kpm/spec/kpm/remote/nexus_facade_spec.rb @@ -1,50 +1,55 @@ +# frozen_string_literal: true + require 'spec_helper' require 'rexml/document' describe KPM::NexusFacade do - - let(:coordinates_map){ {:version => '0.1.4', - :group_id => 'org.kill-bill.billing', - :artifact_id => 'killbill-platform-osgi-api', - :packaging => 'jar', - :classifier => nil} } - let(:coordinates_with_classifier_map){ {:version => '0.1.1', - :group_id => 'org.kill-bill.billing', - :artifact_id => 'killbill-platform-osgi-bundles-jruby', - :packaging => 'jar', - :classifier => 'javadoc'} } - let(:coordinates) { KPM::Coordinates.build_coordinates(coordinates_map)} - let(:coordinates_with_classifier) { KPM::Coordinates.build_coordinates(coordinates_with_classifier_map)} - let(:nexus_remote) { described_class::RemoteFactory.create(nil, true)} + let(:coordinates_map) do + { version: '0.1.4', + group_id: 'org.kill-bill.billing', + artifact_id: 'killbill-platform-osgi-api', + packaging: 'jar', + classifier: nil } + end + let(:coordinates_with_classifier_map) do + { version: '0.1.1', + group_id: 'org.kill-bill.billing', + artifact_id: 'killbill-platform-osgi-bundles-jruby', + packaging: 'jar', + classifier: 'javadoc' } + end + let(:coordinates) { KPM::Coordinates.build_coordinates(coordinates_map) } + let(:coordinates_with_classifier) { KPM::Coordinates.build_coordinates(coordinates_with_classifier_map) } + let(:nexus_remote) { described_class::RemoteFactory.create(nil, true) } it 'when searching for artifacts' do response = nil - expect{ response = nexus_remote.search_for_artifacts(coordinates) }.not_to raise_exception - expect(REXML::Document.new(response).elements["//artifactId"].text).to eq(coordinates_map[:artifact_id]) + expect { response = 
nexus_remote.search_for_artifacts(coordinates) }.not_to raise_exception + expect(REXML::Document.new(response).elements['//artifactId'].text).to eq(coordinates_map[:artifact_id]) end it 'when searching for artifact with classifier' do response = nil - expect{ response = nexus_remote.search_for_artifacts(coordinates_with_classifier) }.not_to raise_exception - expect(REXML::Document.new(response).elements["//artifactId"].text).to eq(coordinates_with_classifier_map[:artifact_id]) + expect { response = nexus_remote.search_for_artifacts(coordinates_with_classifier) }.not_to raise_exception + expect(REXML::Document.new(response).elements['//artifactId'].text).to eq(coordinates_with_classifier_map[:artifact_id]) end it 'when getting artifact info' do response = nil - expect{ response = nexus_remote.get_artifact_info(coordinates) }.not_to raise_exception - expect(REXML::Document.new(response).elements["//version"].text).to eq(coordinates_map[:version]) + expect { response = nexus_remote.get_artifact_info(coordinates) }.not_to raise_exception + expect(REXML::Document.new(response).elements['//version'].text).to eq(coordinates_map[:version]) end it 'when getting artifact info with classifier' do response = nil - expect{ response = nexus_remote.get_artifact_info(coordinates_with_classifier) }.not_to raise_exception - expect(REXML::Document.new(response).elements["//version"].text).to eq(coordinates_with_classifier_map[:version]) + expect { response = nexus_remote.get_artifact_info(coordinates_with_classifier) }.not_to raise_exception + expect(REXML::Document.new(response).elements['//version'].text).to eq(coordinates_with_classifier_map[:version]) end it 'when pull artifact' do response = nil destination = Dir.mktmpdir('artifact') - expect{ response = nexus_remote.pull_artifact(coordinates,destination) }.not_to raise_exception + expect { response = nexus_remote.pull_artifact(coordinates, destination) }.not_to raise_exception destination = 
File.join(File.expand_path(destination), response[:file_name]) expect(File.exist?(destination)).to be_true end @@ -52,9 +57,8 @@ it 'when pull artifact with classifier' do response = nil destination = Dir.mktmpdir('artifact') - expect{ response = nexus_remote.pull_artifact(coordinates_with_classifier,destination) }.not_to raise_exception + expect { response = nexus_remote.pull_artifact(coordinates_with_classifier, destination) }.not_to raise_exception destination = File.join(File.expand_path(destination), response[:file_name]) expect(File.exist?(destination)).to be_true end - -end \ No newline at end of file +end diff --git a/kpm/spec/kpm/remote/tenant_config_spec.rb b/kpm/spec/kpm/remote/tenant_config_spec.rb index edeb214b..37d531d6 100644 --- a/kpm/spec/kpm/remote/tenant_config_spec.rb +++ b/kpm/spec/kpm/remote/tenant_config_spec.rb @@ -1,24 +1,29 @@ +# frozen_string_literal: true + require 'spec_helper' describe KPM::TenantConfig do include_context 'connection_setup' - let(:value) {"\n\n 2017-04-25T15:57:43Z\n DEFAULT\n IN_ADVANCE\n \n \n \n \n \n \n IMMEDIATE\n \n \n \n \n START_OF_BUNDLE\n \n \n \n \n IMMEDIATE\n \n \n \n \n START_OF_BUNDLE\n \n \n \n \n ACCOUNT\n \n \n \n \n DEFAULT\n \n \n \n \n \n \n \n \n \n\n"} - let(:key) {'CATALOG_RSPEC'} - - let(:user) {'KPM Tenant Spec'} - let(:tenant_config_class) { described_class.new([killbill_api_key,killbill_api_secrets], - [killbill_user, killbill_password],url,logger)} - let(:options){{ - :username => killbill_user, - :password => killbill_password, - :api_key => killbill_api_key, - :api_secret => killbill_api_secrets - }} - + let(:value) { "\n\n 2017-04-25T15:57:43Z\n DEFAULT\n IN_ADVANCE\n \n \n \n \n \n \n IMMEDIATE\n \n \n \n \n START_OF_BUNDLE\n \n \n \n \n IMMEDIATE\n \n \n \n \n START_OF_BUNDLE\n \n \n \n \n ACCOUNT\n \n \n \n \n DEFAULT\n \n \n \n \n \n \n \n \n \n\n" } + let(:key) { 'CATALOG_RSPEC' } + + let(:user) { 'KPM Tenant Spec' } + let(:tenant_config_class) do + 
described_class.new([killbill_api_key, killbill_api_secret], + [killbill_user, killbill_password], url, logger) + end + let(:options) do + { + username: killbill_user, + password: killbill_password, + api_key: killbill_api_key, + api_secret: killbill_api_secret + } + end + describe '#initialize' do context 'when creating an instance of tenant config class' do - it 'when initialized with defaults' do expect(described_class.new).to be_an_instance_of(KPM::TenantConfig) end @@ -26,33 +31,29 @@ it 'when initialized with options' do tenant_config_class.should be_an_instance_of(KPM::TenantConfig) expect(tenant_config_class.instance_variable_get(:@killbill_api_key)).to eq(killbill_api_key) - expect(tenant_config_class.instance_variable_get(:@killbill_api_secrets)).to eq(killbill_api_secrets) + expect(tenant_config_class.instance_variable_get(:@killbill_api_secret)).to eq(killbill_api_secret) expect(tenant_config_class.instance_variable_get(:@killbill_user)).to eq(killbill_user) expect(tenant_config_class.instance_variable_get(:@killbill_password)).to eq(killbill_password) expect(tenant_config_class.instance_variable_get(:@killbill_url)).to eq(url) - end - end + end - end - describe '#export' do it 'when retrieving tenant configuration' do KillBillClient.url = url - #Add a new tenant config + # Add a new tenant config tenant_config = KillBillClient::Model::Tenant.upload_tenant_user_key_value(key, value, user, nil, nil, options) expect(tenant_config.key).to eq(key) - - #get created tenant config + + # get created tenant config export_file = tenant_config_class.export(key) expect(File.exist?(export_file)).to be_true expect(File.readlines(export_file).grep(/#{key}/)).to be_true - - #remove created tenant config + + # remove created tenant config KillBillClient::Model::Tenant.delete_tenant_user_key_value(key, user, nil, nil, options) - end end -end \ No newline at end of file +end diff --git a/kpm/spec/kpm/remote/tomcat_manager_spec.rb 
b/kpm/spec/kpm/remote/tomcat_manager_spec.rb index 0bcb48ed..e190dc21 100644 --- a/kpm/spec/kpm/remote/tomcat_manager_spec.rb +++ b/kpm/spec/kpm/remote/tomcat_manager_spec.rb @@ -1,7 +1,8 @@ +# frozen_string_literal: true + require 'spec_helper' describe KPM::TomcatManager do - before(:all) do @logger = Logger.new(STDOUT) @logger.level = Logger::INFO diff --git a/kpm/spec/kpm/unit/actions_spec.rb b/kpm/spec/kpm/unit/actions_spec.rb new file mode 100644 index 00000000..e63ca52b --- /dev/null +++ b/kpm/spec/kpm/unit/actions_spec.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe KPM::NexusFacade::Actions do + subject { described_class.new({}, nil, logger) } + let(:logger) { Logger.new(STDOUT) } + let(:nexus_mock) { double(KPM::NexusFacade::NexusApiCallsV2) } + + before do + KPM::NexusFacade::NexusApiCallsV2.stub(:new).and_return(nexus_mock) + end + + context 'when Nexus throws a non-retryable exception' do + it 'never retries' do + calls = 0 + expect do + subject.send(:retry_exceptions, 'foo') do + calls += 1 + raise StandardError, '404' + end + end.to raise_error(StandardError) + expect(calls).to eq(1) + end + end + + context 'when Nexus throws a retryable exception' do + it 'retries until giving up' do + calls = 0 + expect do + subject.send(:retry_exceptions, 'foo') do + calls += 1 + raise KPM::NexusFacade::UnexpectedStatusCodeException, 503 + end + end.to raise_error(StandardError) + expect(calls).to eq(3) + end + end + + context 'when networking is flaky' do + it 'retries until call succeeds' do + calls = 0 + expect(subject.send(:retry_exceptions, 'foo') do + calls += 1 + raise OpenSSL::SSL::SSLErrorWaitReadable if calls < 2 + + true + end).to be_true + expect(calls).to eq(2) + end + end +end diff --git a/kpm/spec/kpm/unit/base_artifact_spec.rb b/kpm/spec/kpm/unit/base_artifact_spec.rb index 18533416..727b1d82 100644 --- a/kpm/spec/kpm/unit/base_artifact_spec.rb +++ b/kpm/spec/kpm/unit/base_artifact_spec.rb @@ -1,7 +1,8 @@ +# 
frozen_string_literal: true + require 'spec_helper' describe KPM::BaseArtifact do - before(:all) do @logger = Logger.new(STDOUT) @logger.level = Logger::INFO @@ -15,14 +16,14 @@ info[:skipped].should be_false info[:is_tgz].should be_false - info[:repository_path].should == file_path - info[:dir_name].should == dir - info[:bundle_dir].should == dir - info[:file_name].should == 'sha1_test.yml' + info[:repository_path].should eq file_path + info[:dir_name].should eq dir + info[:bundle_dir].should eq dir + info[:file_name].should eq 'sha1_test.yml' files_in_dir = Dir[dir + '/*'] - files_in_dir.size.should == 1 - files_in_dir[0].should == info[:file_path] + files_in_dir.size.should eq 1 + files_in_dir[0].should eq info[:file_path] end end @@ -75,18 +76,18 @@ def check_fs_info(specified_destination_path, repository_path, is_tgz, version, expected_dir_name, expected_file_name, expected_file_path) info = { - :repository_path => repository_path, - :is_tgz => is_tgz, - :version => version + repository_path: repository_path, + is_tgz: is_tgz, + version: version } KPM::BaseArtifact.send('populate_fs_info', info, specified_destination_path) - info[:repository_path].should == repository_path - info[:is_tgz].should == is_tgz - info[:version].should == version - info[:dir_name].should == expected_dir_name - info[:file_name].should == expected_file_name - info[:file_path].should == expected_file_path + info[:repository_path].should eq repository_path + info[:is_tgz].should eq is_tgz + info[:version].should eq version + info[:dir_name].should eq expected_dir_name + info[:file_name].should eq expected_file_name + info[:file_path].should eq expected_file_path end end diff --git a/kpm/spec/kpm/unit/cpu_information_spec.rb b/kpm/spec/kpm/unit/cpu_information_spec.rb new file mode 100644 index 00000000..821f0293 --- /dev/null +++ b/kpm/spec/kpm/unit/cpu_information_spec.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +require 'spec_helper' +require 'kpm/system_helpers/system_proxy' 
+require 'kpm/system_helpers/cpu_information' + +describe KPM::SystemProxy::CpuInformation do + subject { described_class.new } + let(:cpu_info) { subject.send(:build_hash, data) } + + context 'when running on Linux' do + let(:data) { "processor: 0\nvendor_id: GenuineIntel\ncpu family: 6\nmodel: 78\nmodel name: Intel(R) Core(TM) i5-6287U CPU @ 3.10GHz\nstepping: 3\ncpu MHz: 3096.000\ncache size: 4096 KB\nphysical id: 0\nsiblings: 2\ncore id: 0\ncpu cores: 2\napicid: 0\ninitial apicid: 0\nfpu: yes\nfpu_exception: yes\ncpuid level: 22\nwp: yes\nflags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc pni pclmulqdq ssse3 cx16 pcid sse4_1 sse4_2 movbe popcnt aes xsave avx rdrand hypervisor lahf_lm abm 3dnowprefetch fsgsbase avx2 invpcid rdseed clflushopt\nbugs:\nbogomips: 6192.00\nclflush size: 64\ncache_alignment: 64\naddress sizes: 39 bits physical, 48 bits virtual\npower management:\n\nprocessor: 1\nvendor_id: GenuineIntel\ncpu family: 6\nmodel: 78\nmodel name: Intel(R) Core(TM) i5-6287U CPU @ 3.10GHz\nstepping: 3\ncpu MHz: 3096.000\ncache size: 4096 KB\nphysical id: 0\nsiblings: 2\ncore id: 1\ncpu cores: 2\napicid: 1\ninitial apicid: 1\nfpu: yes\nfpu_exception: yes\ncpuid level: 22\nwp: yes\nflags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc pni pclmulqdq ssse3 cx16 pcid sse4_1 sse4_2 movbe popcnt aes xsave avx rdrand hypervisor lahf_lm abm 3dnowprefetch fsgsbase avx2 invpcid rdseed clflushopt\nbugs:\nbogomips: 6192.00\nclflush size: 64\ncache_alignment: 64\naddress sizes: 39 bits physical, 48 bits virtual\npower management:\n\n" } + + it { + expect(subject.labels).to eq([{ label: :cpu_detail }, + { label: :value }]) + } + + it { + expect(cpu_info).to eq({ 'processor' => { cpu_detail: 'processor', value: '1' }, + 
'vendor_id' => { cpu_detail: 'vendor_id', value: 'GenuineIntel' }, + 'cpu family' => { cpu_detail: 'cpu family', value: '6' }, + 'model' => { cpu_detail: 'model', value: '78' }, + 'model name' => + { cpu_detail: 'model name', + value: 'Intel(R) Core(TM) i5-6287U CPU @ 3.10GHz' }, + 'stepping' => { cpu_detail: 'stepping', value: '3' }, + 'cpu MHz' => { cpu_detail: 'cpu MHz', value: '3096.000' }, + 'cache size' => { cpu_detail: 'cache size', value: '4096 KB' }, + 'physical id' => { cpu_detail: 'physical id', value: '0' }, + 'siblings' => { cpu_detail: 'siblings', value: '2' }, + 'core id' => { cpu_detail: 'core id', value: '1' }, + 'cpu cores' => { cpu_detail: 'cpu cores', value: '2' }, + 'apicid' => { cpu_detail: 'apicid', value: '1' }, + 'initial apicid' => { cpu_detail: 'initial apicid', value: '1' }, + 'fpu' => { cpu_detail: 'fpu', value: 'yes' }, + 'fpu_exception' => { cpu_detail: 'fpu_exception', value: 'yes' }, + 'cpuid level' => { cpu_detail: 'cpuid level', value: '22' }, + 'wp' => { cpu_detail: 'wp', value: 'yes' }, + 'bugs' => { cpu_detail: 'bugs', value: '' }, + 'bogomips' => { cpu_detail: 'bogomips', value: '6192.00' }, + 'clflush size' => { cpu_detail: 'clflush size', value: '64' }, + 'cache_alignment' => { cpu_detail: 'cache_alignment', value: '64' }, + 'address sizes' => + { cpu_detail: 'address sizes', value: '39 bits physical, 48 bits virtual' }, + 'power management' => { cpu_detail: 'power management', value: '' } }) + } + end + + context 'when running on MacOS' do + let(:data) { " Processor Name: Intel Core i5\n Processor Speed: 3.1 GHz\n Number of Processors: 1\n Total Number of Cores: 2\n L2 Cache (per Core): 256 KB\n L3 Cache: 4 MB\n" } + + it { + expect(subject.labels).to eq([{ label: :cpu_detail }, + { label: :value }]) + } + + it { + expect(cpu_info).to eq({ 'Processor Name' => { cpu_detail: 'Processor Name', value: 'Intel Core i5' }, + 'Processor Speed' => { cpu_detail: 'Processor Speed', value: '3.1 GHz' }, + 'Number of Processors' => { 
cpu_detail: 'Number of Processors', value: '1' }, + 'Total Number of Cores' => { cpu_detail: 'Total Number of Cores', value: '2' }, + 'L2 Cache (per Core)' => { cpu_detail: 'L2 Cache (per Core)', value: '256 KB' }, + 'L3 Cache' => { cpu_detail: 'L3 Cache', value: '4 MB' } }) + } + end +end diff --git a/kpm/spec/kpm/unit/disk_space_information_spec.rb b/kpm/spec/kpm/unit/disk_space_information_spec.rb new file mode 100644 index 00000000..84e5885a --- /dev/null +++ b/kpm/spec/kpm/unit/disk_space_information_spec.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +require 'spec_helper' +require 'kpm/system_helpers/system_proxy' +require 'kpm/system_helpers/disk_space_information' + +describe KPM::SystemProxy::DiskSpaceInformation do + subject { described_class.new } + let(:data_keys) { [] } + let!(:disk_space_info) { subject.send(:build_hash, data, cols_count, true, data_keys) } + + context 'when running on Linux' do + let(:cols_count) { 5 } + let(:data) { "Filesystem 1K-blocks Used Available Use% Mounted on\nnone 58419028 24656532 30723884 45% /\ntmpfs 65536 0 65536 0% /dev\ntmpfs 5387012 0 5387012 0% /sys/fs/cgroup\n/dev/sda1 58419028 24656532 30723884 45% /etc/hosts\nshm 65536 0 65536 0% /dev/shm\ntmpfs 5387012 0 5387012 0% /sys/firmware\n" } + + it { + expect(data_keys).to eq(['Filesystem', '1K-blocks', 'Used', 'Available', 'Use%', 'Mounted on']) + } + + it { + expect(disk_space_info).to eq({ 'DiskInfo_2' => { :Filesystem => 'none', :"1K-blocks" => '58419028', :Used => '24656532', :Available => '30723884', :"Use%" => '45%', :Mounted_on => '/' }, + 'DiskInfo_3' => { :Filesystem => 'tmpfs', :"1K-blocks" => '65536', :Used => '0', :Available => '65536', :"Use%" => '0%', :Mounted_on => '/dev' }, + 'DiskInfo_4' => { :Filesystem => 'tmpfs', :"1K-blocks" => '5387012', :Used => '0', :Available => '5387012', :"Use%" => '0%', :Mounted_on => '/sys/fs/cgroup' }, + 'DiskInfo_5' => { :Filesystem => '/dev/sda1', :"1K-blocks" => '58419028', :Used => '24656532', :Available => 
'30723884', :"Use%" => '45%', :Mounted_on => '/etc/hosts' }, + 'DiskInfo_6' => { :Filesystem => 'shm', :"1K-blocks" => '65536', :Used => '0', :Available => '65536', :"Use%" => '0%', :Mounted_on => '/dev/shm' }, + 'DiskInfo_7' => { :Filesystem => 'tmpfs', :"1K-blocks" => '5387012', :Used => '0', :Available => '5387012', :"Use%" => '0%', :Mounted_on => '/sys/firmware' } }) + } + end + + context 'when running on MacOS' do + let(:cols_count) { 8 } + let(:data) { "Filesystem 512-blocks Used Available Capacity iused ifree %iused Mounted on\n/dev/disk1s1 976490576 778131600 173031648 82% 2431747 9223372036852344060 0% /\ndevfs 690 690 0 100% 1194 0 100% /dev\n/dev/disk1s4 976490576 23925200 173031648 13% 5 9223372036854775802 0% /private/var/vm\nmap -hosts 0 0 0 100% 0 0 100% /net\nmap auto_home 0 0 0 100% 0 0 100% /home\n/dev/disk1s3 976490576 996584 173031648 1% 34 9223372036854775773 0% /Volumes/Recovery\n" } + + it { + expect(data_keys).to eq(['Filesystem', '512-blocks', 'Used', 'Available', 'Capacity', 'iused', 'ifree', '%iused', 'Mounted on']) + } + + it { + expect(disk_space_info).to eq({ 'DiskInfo_2' => { :Filesystem => '/dev/disk1s1', :"512-blocks" => '976490576', :Used => '778131600', :Available => '173031648', :Capacity => '82%', :iused => '2431747', :ifree => '9223372036852344060', :"%iused" => '0%', :Mounted_on => '/' }, + 'DiskInfo_3' => { :Filesystem => 'devfs', :"512-blocks" => '690', :Used => '690', :Available => '0', :Capacity => '100%', :iused => '1194', :ifree => '0', :"%iused" => '100%', :Mounted_on => '/dev' }, + 'DiskInfo_4' => { :Filesystem => '/dev/disk1s4', :"512-blocks" => '976490576', :Used => '23925200', :Available => '173031648', :Capacity => '13%', :iused => '5', :ifree => '9223372036854775802', :"%iused" => '0%', :Mounted_on => '/private/var/vm' }, + 'DiskInfo_5' => { :Filesystem => 'map', :"512-blocks" => '-hosts', :Used => '0', :Available => '0', :Capacity => '0', :iused => '100%', :ifree => '0', :"%iused" => '0', :Mounted_on => '100% 
/net ' }, + 'DiskInfo_6' => { :Filesystem => 'map', :"512-blocks" => 'auto_home', :Used => '0', :Available => '0', :Capacity => '0', :iused => '100%', :ifree => '0', :"%iused" => '0', :Mounted_on => '100% /home ' }, + 'DiskInfo_7' => { :Filesystem => '/dev/disk1s3', :"512-blocks" => '976490576', :Used => '996584', :Available => '173031648', :Capacity => '1%', :iused => '34', :ifree => '9223372036854775773', :"%iused" => '0%', :Mounted_on => '/Volumes/Recovery' } }) + } + end +end diff --git a/kpm/spec/kpm/unit/entropy_information_spec.rb b/kpm/spec/kpm/unit/entropy_information_spec.rb new file mode 100644 index 00000000..3853c46e --- /dev/null +++ b/kpm/spec/kpm/unit/entropy_information_spec.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +require 'spec_helper' +require 'kpm/system_helpers/system_proxy' +require 'kpm/system_helpers/entropy_available' + +describe KPM::SystemProxy::EntropyAvailable do + subject { described_class.new } + let(:entropy_info) { subject.send(:build_hash, data) } + + context 'when running on Linux' do + let(:data) { '182' } + + it { + expect(subject.labels).to eq([{ label: :entropy }, + { label: :value }]) + } + + it { + expect(entropy_info).to eq({ 'entropy_available' => { entropy: 'available', value: '182' } }) + } + end + + context 'when running on MacOS' do + let(:data) { '-' } + + it { + expect(subject.labels).to eq([{ label: :entropy }, + { label: :value }]) + } + + it { + expect(entropy_info).to eq({ 'entropy_available' => { entropy: 'available', value: '-' } }) + } + end +end diff --git a/kpm/spec/kpm/unit/formatter_spec.rb b/kpm/spec/kpm/unit/formatter_spec.rb new file mode 100644 index 00000000..7a55fa85 --- /dev/null +++ b/kpm/spec/kpm/unit/formatter_spec.rb @@ -0,0 +1,163 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 'Formatter' do + describe KPM::Formatter::DefaultFormatter do + subject { described_class.new(label, input) } + + context 'when arguments are nil' do + let(:label) { nil } + 
let(:input) { nil } + + it { expect(subject.size).to eq(0) } + it { expect(subject.to_s).to eq('') } + it { expect(subject.label).to eq('') } + end + + context 'when arguments are non-nil' do + let(:label) { 'my_label' } + let(:input) { 'my_value' } + + it { expect(subject.size).to eq(8) } + it { expect(subject.to_s).to eq('my_value') } + it { expect(subject.label).to eq('MY LABEL') } + end + end + + describe KPM::Formatter::VersionFormatter do + subject { described_class.new(label, versions) } + + context 'when arguments are nil/empty' do + let(:label) { nil } + let(:versions) { [] } + + it { expect(subject.size).to eq(0) } + it { expect(subject.to_s).to eq('') } + it { expect(subject.label).to eq(' sha1=[], def=(*), del=(x)') } + end + + context 'when arguments are non-nil' do + let(:label) { 'my_label' } + let(:versions) do + [{ version: '1.0', is_default: false, is_disabled: false, sha1: nil }, + { version: '2.0', is_default: true, is_disabled: true, sha1: '123456789' }] + end + + it { expect(subject.size).to eq(29) } + it { expect(subject.to_s).to eq('1.0[???], 2.0[123456..](*)(x)') } + it { expect(subject.label).to eq('MY LABEL sha1=[], def=(*), del=(x)') } + end + end + + describe KPM::Formatter do + subject { described_class.new } + + context 'when running inspect' do + let(:data) do + { 'killbill-kpm' => { plugin_name: 'killbill-kpm', plugin_path: '/var/tmp/bundles/plugins/ruby/killbill-kpm', type: 'ruby', versions: [{ version: '1.3.0', is_default: true, is_disabled: false, sha1: 'b350016c539abc48e51c97605ac1f08b441843d3' }], plugin_key: 'kpm', group_id: 'org.kill-bill.billing.plugin.ruby', artifact_id: 'kpm-plugin', packaging: 'tar.gz', classifier: nil }, + 'hello-world-plugin' => { plugin_name: 'hello-world-plugin', plugin_path: '/var/tmp/bundles/plugins/java/hello-world-plugin', type: 'java', versions: [{ version: '1.0.1-SNAPSHOT', is_default: true, is_disabled: false, sha1: nil }], plugin_key: 'dev:hello', group_id: nil, artifact_id: nil, packaging: 
nil, classifier: nil }, + 'analytics-plugin' => { plugin_name: 'analytics-plugin', plugin_path: '/var/tmp/bundles/plugins/java/analytics-plugin', type: 'java', versions: [{ version: '7.0.3-SNAPSHOT', is_default: true, is_disabled: false, sha1: nil }], plugin_key: 'analytics', group_id: nil, artifact_id: nil, packaging: nil, classifier: nil } } + end + let(:labels) do + [{ label: :plugin_name }, + { label: :plugin_key }, + { label: :type }, + { label: :group_id }, + { label: :artifact_id }, + { label: :packaging }, + { label: :versions, formatter: KPM::Formatter::VersionFormatter.name }] + end + let!(:labels_format_argument) { subject.send(:compute_labels, data, labels) } + + it { + expect(labels_format_argument).to eq(['PLUGIN NAME', + 'PLUGIN KEY', + 'TYPE', + 'GROUP ID', + 'ARTIFACT ID', + 'PACKAGING', + 'VERSIONS sha1=[], def=(*), del=(x)']) + } + + it { + expect(labels).to eq([{ label: :plugin_name, size: 18 }, + { label: :plugin_key, size: 10 }, + { label: :type, size: 4 }, + { label: :group_id, size: 33 }, + { label: :artifact_id, size: 11 }, + { label: :packaging, size: 9 }, + { label: :versions, formatter: KPM::Formatter::VersionFormatter.name, size: 34 }]) + } + + it { + # labels have the size computed here already + expect(subject.send(:compute_border, labels)).to eq('_____________________________________________________________________________________________________________________________________________') + } + + it { + # labels have the size computed here already + expect(subject.send(:compute_format, labels)).to eq('| %18s | %10s | %4s | %33s | %11s | %9s | %34s |') + } + + it { + expect(subject.send(:format_only, data, labels)).to eq("\n_____________________________________________________________________________________________________________________________________________ +| PLUGIN NAME | PLUGIN KEY | TYPE | GROUP ID | ARTIFACT ID | PACKAGING | VERSIONS sha1=[], def=(*), del=(x) | 
+_____________________________________________________________________________________________________________________________________________ +| killbill-kpm | kpm | ruby | org.kill-bill.billing.plugin.ruby | kpm-plugin | tar.gz | 1.3.0[b35001..](*) | +| hello-world-plugin | dev:hello | java | ??? | ??? | ??? | 1.0.1-SNAPSHOT[???](*) | +| analytics-plugin | analytics | java | ??? | ??? | ??? | 7.0.3-SNAPSHOT[???](*) | +_____________________________________________________________________________________________________________________________________________\n\n") + } + end + + context 'when formatting CPU information' do + let(:data) do + { 'Processor Name' => { cpu_detail: 'Processor Name', value: 'Intel Core i5' }, + 'Processor Speed' => { cpu_detail: 'Processor Speed', value: '3.1 GHz' }, + 'Number of Processors' => { cpu_detail: 'Number of Processors', value: '1' }, + 'Total Number of Cores' => { cpu_detail: 'Total Number of Cores', value: '2' }, + 'L2 Cache (per Core)' => { cpu_detail: 'L2 Cache (per Core)', value: '256 KB' }, + 'L3 Cache' => { cpu_detail: 'L3 Cache', value: '4 MB' } } + end + let(:labels) do + [{ label: :cpu_detail }, + { label: :value }] + end + let!(:labels_format_argument) { subject.send(:compute_labels, data, labels) } + + it { + expect(labels_format_argument).to eq(['CPU DETAIL', + 'VALUE']) + } + + it { + expect(labels).to eq([{ label: :cpu_detail, size: 21 }, + { label: :value, size: 13 }]) + } + + it { + # labels have the size computed here already + expect(subject.send(:compute_border, labels)).to eq('_________________________________________') + } + + it { + # labels have the size computed here already + expect(subject.send(:compute_format, labels)).to eq('| %21s | %13s |') + } + + it { + expect(subject.send(:format_only, data, labels)).to eq("\n_________________________________________ +| CPU DETAIL | VALUE | +_________________________________________ +| Processor Name | Intel Core i5 | +| Processor Speed | 3.1 GHz | +| Number of 
Processors | 1 | +| Total Number of Cores | 2 | +| L2 Cache (per Core) | 256 KB | +| L3 Cache | 4 MB | +_________________________________________\n\n") + } + end + end +end diff --git a/kpm/spec/kpm/unit/inspector_spec.rb b/kpm/spec/kpm/unit/inspector_spec.rb index 5d9aa2df..d312459f 100644 --- a/kpm/spec/kpm/unit/inspector_spec.rb +++ b/kpm/spec/kpm/unit/inspector_spec.rb @@ -1,7 +1,8 @@ +# frozen_string_literal: true + require 'spec_helper' describe KPM::Inspector do - before(:each) do @logger = Logger.new(STDOUT) @logger.level = Logger::INFO @@ -20,72 +21,64 @@ @manager = KPM::PluginsManager.new(@plugins_dir, @logger) - @sha1_file = @bundles_dir.join("sha1.yml") + @sha1_file = @bundles_dir.join('sha1.yml') @sha1_checker = KPM::Sha1Checker.from_file(@sha1_file) - end - it 'should parse a correctly setup env' do - - add_plugin('foo', 'plugin_foo', ['1.2.3', '2.0.0', '2.0.1'], 'ruby', 'com.foo', 'foo', 'tar.gz', nil, ['12345', '23456', '34567'], '2.0.1', ['1.2.3']) + add_plugin('foo', 'plugin_foo', ['1.2.3', '2.0.0', '2.0.1'], 'ruby', 'com.foo', 'foo', 'tar.gz', nil, %w[12345 23456 34567], '2.0.1', ['1.2.3']) add_plugin('bar', 'plugin_bar', ['1.0.0'], 'java', 'com.bar', 'bar', 'jar', nil, ['98765'], nil, []) inspector = KPM::Inspector.new all_plugins = inspector.inspect(@bundles_dir) - all_plugins.size == 2 - - all_plugins['plugin_bar']['plugin_key'] == 'bar' - all_plugins['plugin_bar']['plugin_path'] == @java_plugins_dir.join('plugin_bar').to_s - all_plugins['plugin_bar'][:versions].size == 1 - all_plugins['plugin_bar'][:versions][0][:version] == '1.0.0' - all_plugins['plugin_bar'][:versions][0][:is_default] == true - all_plugins['plugin_bar'][:versions][0][:is_disabled] == false - all_plugins['plugin_bar'][:versions][0][:sha1] == '98765' - - all_plugins['plugin_foo']['plugin_key'] == 'foo' - all_plugins['plugin_foo']['plugin_path'] == @ruby_plugins_dir.join('plugin_foo').to_s - all_plugins['plugin_foo'][:versions].size == 3 - - 
all_plugins['plugin_foo'][:versions][0][:version] == '1.2.3' - all_plugins['plugin_foo'][:versions][0][:is_default] == false - all_plugins['plugin_foo'][:versions][0][:is_disabled] == true - all_plugins['plugin_foo'][:versions][0][:sha1] == '12345' - - all_plugins['plugin_foo'][:versions][1][:version] == '2.0.0' - all_plugins['plugin_foo'][:versions][1][:is_default] == false - all_plugins['plugin_foo'][:versions][1][:is_disabled] == false - all_plugins['plugin_foo'][:versions][1][:sha1] == '23456' - - all_plugins['plugin_foo'][:versions][2][:version] == '2.0.1' - all_plugins['plugin_foo'][:versions][2][:is_default] == true - all_plugins['plugin_foo'][:versions][2][:is_disabled] == false - all_plugins['plugin_foo'][:versions][2][:sha1] == '34567' - + all_plugins.size.should eq 2 + + all_plugins['plugin_bar'][:plugin_key].should eq 'bar' + all_plugins['plugin_bar'][:plugin_path].should eq @java_plugins_dir.join('plugin_bar').to_s + all_plugins['plugin_bar'][:versions].size.should eq 1 + all_plugins['plugin_bar'][:versions][0][:version].should eq '1.0.0' + all_plugins['plugin_bar'][:versions][0][:is_default].should eq false + all_plugins['plugin_bar'][:versions][0][:is_disabled].should eq false + all_plugins['plugin_bar'][:versions][0][:sha1].should eq '98765' + + all_plugins['plugin_foo'][:plugin_key].should eq 'foo' + all_plugins['plugin_foo'][:plugin_path].should eq @ruby_plugins_dir.join('plugin_foo').to_s + all_plugins['plugin_foo'][:versions].size.should eq 3 + + all_plugins['plugin_foo'][:versions][0][:version].should eq '1.2.3' + all_plugins['plugin_foo'][:versions][0][:is_default].should eq false + all_plugins['plugin_foo'][:versions][0][:is_disabled].should eq true + all_plugins['plugin_foo'][:versions][0][:sha1].should eq '12345' + + all_plugins['plugin_foo'][:versions][1][:version].should eq '2.0.0' + all_plugins['plugin_foo'][:versions][1][:is_default].should eq false + all_plugins['plugin_foo'][:versions][1][:is_disabled].should eq false + 
all_plugins['plugin_foo'][:versions][1][:sha1].should eq '23456' + + all_plugins['plugin_foo'][:versions][2][:version].should eq '2.0.1' + all_plugins['plugin_foo'][:versions][2][:is_default].should eq true + all_plugins['plugin_foo'][:versions][2][:is_disabled].should eq false + all_plugins['plugin_foo'][:versions][2][:sha1].should eq '34567' end - private def add_plugin(plugin_key, plugin_name, versions, language, group_id, artifact_id, packaging, classifier, sha1, active_version, disabled_versions) - plugin_dir = language == 'ruby' ? @ruby_plugins_dir.join(plugin_name) : @java_plugins_dir.join(plugin_name) versions.each_with_index do |v, idx| - - coordinate_map = {:group_id => group_id, :artifact_id => artifact_id, :version => v, :packaging => packaging, :classifier => classifier} + coordinate_map = { group_id: group_id, artifact_id: artifact_id, version: v, packaging: packaging, classifier: classifier } coordinates = KPM::Coordinates.build_coordinates(coordinate_map) @manager.add_plugin_identifier_key(plugin_key, plugin_name, language, coordinate_map) @sha1_checker.add_or_modify_entry!(coordinates, sha1[idx]) - plugin_dir_version = plugin_dir.join(v) FileUtils.mkdir_p(plugin_dir_version) # Create some entry to look real - some_file = 'ruby' ? 'ROOT' : '#{plugin_name}.jar' + some_file = language == 'ruby' ? 
'ROOT' : "#{plugin_name}.jar" FileUtils.touch(plugin_dir_version.join(some_file)) end @@ -95,5 +88,4 @@ def add_plugin(plugin_key, plugin_name, versions, language, group_id, artifact_i @manager.uninstall(plugin_dir, v) end end - end diff --git a/kpm/spec/kpm/unit/installer_spec.rb b/kpm/spec/kpm/unit/installer_spec.rb index 8cb1f1eb..fe855261 100644 --- a/kpm/spec/kpm/unit/installer_spec.rb +++ b/kpm/spec/kpm/unit/installer_spec.rb @@ -1,17 +1,18 @@ +# frozen_string_literal: true + require 'spec_helper' describe KPM::Installer do - context 'when no config file is specified' do - let(:all_kb_versions) { %w(0.15.0 0.15.1 0.15.10 0.15.11-SNAPSHOT 0.15.2 0.15.3 0.16.0 0.16.1 0.16.10 0.16.11 0.16.12-SNAPSHOT 0.16.2 0.16.3 0.17.0 0.17.1 0.17.2 0.17.2-SNAPSHOT 0.17.3-SNAPSHOT) } + let(:all_kb_versions) { %w[0.15.0 0.15.1 0.15.10 0.15.11-SNAPSHOT 0.15.2 0.15.3 0.16.0 0.16.1 0.16.10 0.16.11 0.16.12-SNAPSHOT 0.16.2 0.16.3 0.17.0 0.17.1 0.17.2 0.17.2-SNAPSHOT 0.17.3-SNAPSHOT] } it 'finds the right stable versions' do config = KPM::Installer.build_default_config(all_kb_versions) config['killbill'].should_not be_nil - config['killbill']['version'].should == '0.16.11' + config['killbill']['version'].should eq '0.16.11' config['kaui'].should_not be_nil - config['kaui']['version'].should == 'LATEST' + config['kaui']['version'].should eq 'LATEST' end end end diff --git a/kpm/spec/kpm/unit/memory_information_spec.rb b/kpm/spec/kpm/unit/memory_information_spec.rb new file mode 100644 index 00000000..6773dea9 --- /dev/null +++ b/kpm/spec/kpm/unit/memory_information_spec.rb @@ -0,0 +1,102 @@ +# frozen_string_literal: true + +require 'spec_helper' +require 'kpm/system_helpers/system_proxy' +require 'kpm/system_helpers/memory_information' + +describe KPM::SystemProxy::MemoryInformation do + subject { described_class.new } + + context 'when running on Linux' do + let(:data) { "MemTotal: 10774024 kB\nMemFree: 3788232 kB\nMemAvailable: 9483696 kB\nBuffers: 269216 kB\nCached: 5448624 
kB\nSwapCached: 0 kB\nActive: 3562072 kB\nInactive: 2913296 kB\nActive(anon): 827072 kB\nInactive(anon): 124844 kB\nActive(file): 2735000 kB\nInactive(file): 2788452 kB\nUnevictable: 0 kB\nMlocked: 0 kB\nSwapTotal: 3620520 kB\nSwapFree: 3620520 kB\nDirty: 16 kB\nWriteback: 0 kB\nAnonPages: 757472 kB\nMapped: 71548 kB\nShmem: 194392 kB\nSlab: 468096 kB\nSReclaimable: 425428 kB\nSUnreclaim: 42668 kB\nKernelStack: 4816 kB\nPageTables: 3420 kB\nNFS_Unstable: 0 kB\nBounce: 0 kB\nWritebackTmp: 0 kB\nCommitLimit: 9007532 kB\nCommitted_AS: 1711072 kB\nVmallocTotal: 34359738367 kB\nVmallocUsed: 0 kB\nVmallocChunk: 0 kB\nAnonHugePages: 622592 kB\nHugePages_Total: 0\nHugePages_Free: 0\nHugePages_Rsvd: 0\nHugePages_Surp: 0\nHugepagesize: 2048 kB\nDirectMap4k: 166848 kB\nDirectMap2M: 10883072 kB\n" } + let(:memory_info) { subject.send(:build_hash, data) } + + it { + expect(subject.labels).to eq([{ label: :memory_detail }, + { label: :value }]) + } + + it { + expect(memory_info).to eq({ 'MemTotal' => { memory_detail: 'MemTotal', value: '10774024 kB' }, + 'MemFree' => { memory_detail: 'MemFree', value: '3788232 kB' }, + 'MemAvailable' => { memory_detail: 'MemAvailable', value: '9483696 kB' }, + 'Buffers' => { memory_detail: 'Buffers', value: '269216 kB' }, + 'Cached' => { memory_detail: 'Cached', value: '5448624 kB' }, + 'SwapCached' => { memory_detail: 'SwapCached', value: '0 kB' }, + 'Active' => { memory_detail: 'Active', value: '3562072 kB' }, + 'Inactive' => { memory_detail: 'Inactive', value: '2913296 kB' }, + 'Active(anon)' => { memory_detail: 'Active(anon)', value: '827072 kB' }, + 'Inactive(anon)' => { memory_detail: 'Inactive(anon)', value: '124844 kB' }, + 'Active(file)' => { memory_detail: 'Active(file)', value: '2735000 kB' }, + 'Inactive(file)' => { memory_detail: 'Inactive(file)', value: '2788452 kB' }, + 'Unevictable' => { memory_detail: 'Unevictable', value: '0 kB' }, + 'Mlocked' => { memory_detail: 'Mlocked', value: '0 kB' }, + 'SwapTotal' => { memory_detail: 
'SwapTotal', value: '3620520 kB' }, + 'SwapFree' => { memory_detail: 'SwapFree', value: '3620520 kB' }, + 'Dirty' => { memory_detail: 'Dirty', value: '16 kB' }, + 'Writeback' => { memory_detail: 'Writeback', value: '0 kB' }, + 'AnonPages' => { memory_detail: 'AnonPages', value: '757472 kB' }, + 'Mapped' => { memory_detail: 'Mapped', value: '71548 kB' }, + 'Shmem' => { memory_detail: 'Shmem', value: '194392 kB' }, + 'Slab' => { memory_detail: 'Slab', value: '468096 kB' }, + 'SReclaimable' => { memory_detail: 'SReclaimable', value: '425428 kB' }, + 'SUnreclaim' => { memory_detail: 'SUnreclaim', value: '42668 kB' }, + 'KernelStack' => { memory_detail: 'KernelStack', value: '4816 kB' }, + 'PageTables' => { memory_detail: 'PageTables', value: '3420 kB' }, + 'NFS_Unstable' => { memory_detail: 'NFS_Unstable', value: '0 kB' }, + 'Bounce' => { memory_detail: 'Bounce', value: '0 kB' }, + 'WritebackTmp' => { memory_detail: 'WritebackTmp', value: '0 kB' }, + 'CommitLimit' => { memory_detail: 'CommitLimit', value: '9007532 kB' }, + 'Committed_AS' => { memory_detail: 'Committed_AS', value: '1711072 kB' }, + 'VmallocTotal' => { memory_detail: 'VmallocTotal', value: '34359738367 kB' }, + 'VmallocUsed' => { memory_detail: 'VmallocUsed', value: '0 kB' }, + 'VmallocChunk' => { memory_detail: 'VmallocChunk', value: '0 kB' }, + 'AnonHugePages' => { memory_detail: 'AnonHugePages', value: '622592 kB' }, + 'HugePages_Total' => { memory_detail: 'HugePages_Total', value: '0' }, + 'HugePages_Free' => { memory_detail: 'HugePages_Free', value: '0' }, + 'HugePages_Rsvd' => { memory_detail: 'HugePages_Rsvd', value: '0' }, + 'HugePages_Surp' => { memory_detail: 'HugePages_Surp', value: '0' }, + 'Hugepagesize' => { memory_detail: 'Hugepagesize', value: '2048 kB' }, + 'DirectMap4k' => { memory_detail: 'DirectMap4k', value: '166848 kB' }, + 'DirectMap2M' => { memory_detail: 'DirectMap2M', value: '10883072 kB' } }) + } + end + + context 'when running on MacOS' do + let(:mem_data) { "Mach Virtual 
Memory Statistics: (page size of 4096 bytes)\nPages free: 20436\nPages active: 279093\nPages inactive: 276175\nPages speculative: 2492\nPages throttled: 0\nPages wired down: 3328540\nPages purgeable: 47378\n\"Translation faults\": 1774872371\nPages copy-on-write: 34313850\nPages zero filled: 1023660277\nPages reactivated: 194623586\nPages purged: 70443047\nFile-backed pages: 119033\nAnonymous pages: 438727\nPages stored in compressor: 2771982\nPages occupied by compressor: 287324\nDecompressions: 252938013\nCompressions: 328708973\nPageins: 66884005\nPageouts: 1122278\nSwapins: 110783726\nSwapouts: 113589173\n" } + let(:mem_total_data) { " Memory: 16 GB\n" } + let(:memory_info) { subject.send(:build_hash_mac, mem_data, mem_total_data) } + + it { + expect(subject.labels).to eq([{ label: :memory_detail }, + { label: :value }]) + } + + it { + expect(memory_info).to eq({ 'Memory' => { memory_detail: 'Memory', value: '16 GB' }, + 'Mach Virtual Memory Statistics' => { memory_detail: 'Mach Virtual Memory Statistics', value: '0MB' }, + 'Pages free' => { memory_detail: 'Memory free', value: '79MB' }, + 'Pages active' => { memory_detail: 'Memory active', value: '1090MB' }, + 'Pages inactive' => { memory_detail: 'Memory inactive', value: '1078MB' }, + 'Pages speculative' => { memory_detail: 'Memory speculative', value: '9MB' }, + 'Pages throttled' => { memory_detail: 'Memory throttled', value: '0MB' }, + 'Pages wired down' => { memory_detail: 'Memory wired down', value: '13002MB' }, + 'Pages purgeable' => { memory_detail: 'Memory purgeable', value: '185MB' }, + 'Translation faults' => { memory_detail: 'Translation faults', value: '6933095MB' }, + 'Pages copy-on-write' => { memory_detail: 'Memory copy-on-write', value: '134038MB' }, + 'Pages zero filled' => { memory_detail: 'Memory zero filled', value: '3998672MB' }, + 'Pages reactivated' => { memory_detail: 'Memory reactivated', value: '760248MB' }, + 'Pages purged' => { memory_detail: 'Memory purged', value: '275168MB' }, + 
'File-backed pages' => { memory_detail: 'File-backed pages', value: '464MB' }, + 'Anonymous pages' => { memory_detail: 'Anonymous pages', value: '1713MB' }, + 'Pages stored in compressor' => { memory_detail: 'Memory stored in compressor', value: '10828MB' }, + 'Pages occupied by compressor' => { memory_detail: 'Memory occupied by compressor', value: '1122MB' }, + 'Decompressions' => { memory_detail: 'Decompressions', value: '988039MB' }, + 'Compressions' => { memory_detail: 'Compressions', value: '1284019MB' }, + 'Pageins' => { memory_detail: 'Pageins', value: '261265MB' }, + 'Pageouts' => { memory_detail: 'Pageouts', value: '4383MB' }, + 'Swapins' => { memory_detail: 'Swapins', value: '432748MB' }, + 'Swapouts' => { memory_detail: 'Swapouts', value: '443707MB' } }) + } + end +end diff --git a/kpm/spec/kpm/unit/os_information_spec.rb b/kpm/spec/kpm/unit/os_information_spec.rb new file mode 100644 index 00000000..978480f3 --- /dev/null +++ b/kpm/spec/kpm/unit/os_information_spec.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +require 'spec_helper' +require 'kpm/system_helpers/system_proxy' +require 'kpm/system_helpers/os_information' + +describe KPM::SystemProxy::OsInformation do + subject { described_class.new } + let(:os_data) { subject.send(:build_hash, data) } + + context 'when running on Linux' do + let(:data) { "Description:Ubuntu 16.04.1 LTS \n\n" } + + it { + expect(subject.labels).to eq([{ label: :os_detail }, + { label: :value }]) + } + + it { + expect(os_data).to eq({ 'Description' => { :os_detail => 'Description', :value => 'Ubuntu 16.04.1 LTS' } }) + } + end + + context 'when running on MacOS' do + let(:data) { "ProductName:\tMac OS X\nProductVersion:\t10.14.6\nBuildVersion:\t18G87\n" } + + it { + expect(subject.labels).to eq([{ label: :os_detail }, + { label: :value }]) + } + + it { + expect(os_data).to eq({ 'ProductName' => { :os_detail => 'ProductName', :value => 'Mac OS X' }, + 'ProductVersion' => { :os_detail => 'ProductVersion', :value => 
'10.14.6' }, + 'BuildVersion' => { :os_detail => 'BuildVersion', :value => '18G87' } }) + } + end +end diff --git a/kpm/spec/kpm/unit/plugins_directory_spec.rb b/kpm/spec/kpm/unit/plugins_directory_spec.rb index 514caeb7..95a70b27 100644 --- a/kpm/spec/kpm/unit/plugins_directory_spec.rb +++ b/kpm/spec/kpm/unit/plugins_directory_spec.rb @@ -1,36 +1,52 @@ +# frozen_string_literal: true + require 'spec_helper' describe KPM::PluginsDirectory do - it 'should parse the plugins directory' do directory = KPM::PluginsDirectory.all(false) directory.size.should > 0 end - it 'should lookup plugins' do - group_id, artifact_id, packaging, classifier, version, type = KPM::PluginsDirectory.lookup('analytics', false, '0.14') - group_id.should == 'org.kill-bill.billing.plugin.java' - artifact_id.should == 'analytics-plugin' - packaging.should == 'jar' + group_id, artifact_id, packaging, classifier, version, type = KPM::PluginsDirectory.lookup('analytics', false, '0.20.11') + group_id.should eq 'org.kill-bill.billing.plugin.java' + artifact_id.should eq 'analytics-plugin' + packaging.should eq 'jar' + classifier.should be_nil + version.should eq '6.0.1' + type.should eq :java + + group_id, artifact_id, packaging, classifier, version, type = KPM::PluginsDirectory.lookup('analytics', false, '0.20.11-SNAPSHOT') + group_id.should eq 'org.kill-bill.billing.plugin.java' + artifact_id.should eq 'analytics-plugin' + packaging.should eq 'jar' + classifier.should be_nil + version.should eq '6.0.1' + type.should eq :java + + group_id, artifact_id, packaging, classifier, version, type = KPM::PluginsDirectory.lookup('analytics', false, '0.20') + group_id.should eq 'org.kill-bill.billing.plugin.java' + artifact_id.should eq 'analytics-plugin' + packaging.should eq 'jar' classifier.should be_nil - version.should == '1.0.3' - type.should == :java + version.should eq '6.0.1' + type.should eq :java group_id, artifact_id, packaging, classifier, version, type = KPM::PluginsDirectory.lookup('analytics', 
false, 'LATEST') - group_id.should == 'org.kill-bill.billing.plugin.java' - artifact_id.should == 'analytics-plugin' - packaging.should == 'jar' + group_id.should eq 'org.kill-bill.billing.plugin.java' + artifact_id.should eq 'analytics-plugin' + packaging.should eq 'jar' classifier.should be_nil - version.should == 'LATEST' - type.should == :java + version.should eq 'LATEST' + type.should eq :java group_id, artifact_id, packaging, classifier, version, type = KPM::PluginsDirectory.lookup('analytics', false, '0.42') - group_id.should == 'org.kill-bill.billing.plugin.java' - artifact_id.should == 'analytics-plugin' - packaging.should == 'jar' + group_id.should eq 'org.kill-bill.billing.plugin.java' + artifact_id.should eq 'analytics-plugin' + packaging.should eq 'jar' classifier.should be_nil - version.should == 'LATEST' - type.should == :java + version.should eq 'LATEST' + type.should eq :java end end diff --git a/kpm/spec/kpm/unit/plugins_manager_spec.rb b/kpm/spec/kpm/unit/plugins_manager_spec.rb index 8e374307..f61a6f25 100644 --- a/kpm/spec/kpm/unit/plugins_manager_spec.rb +++ b/kpm/spec/kpm/unit/plugins_manager_spec.rb @@ -1,7 +1,8 @@ +# frozen_string_literal: true + require 'spec_helper' describe KPM::PluginsManager do - before(:each) do logger = Logger.new(STDOUT) logger.level = Logger::INFO @@ -14,7 +15,7 @@ FileUtils.mkdir_p(@plugin_dir.join('1.0.0')) FileUtils.mkdir_p(@plugin_dir.join('2.0.0')) - File.exists?(@plugin_dir.join('SET_DEFAULT')).should be_false + File.exist?(@plugin_dir.join('SET_DEFAULT')).should be_false end after(:each) do @@ -24,133 +25,128 @@ it 'creates a plugin identifier entry with no coordinate' do # Verifies file gets created if does not exist identifiers = @manager.add_plugin_identifier_key('foo', 'foo_name', 'type', nil) - identifiers.size.should == 1 - identifiers['foo']['plugin_name'].should == 'foo_name' + identifiers.size.should eq 1 + identifiers['foo']['plugin_name'].should eq 'foo_name' end it 'creates a plugin identifier 
entry with coordinates' do # Verifies file gets created if does not exist - coordinate_map = {:group_id => 'group', :artifact_id => 'artifact', :packaging => 'packaging', :version => 'version'} + coordinate_map = { group_id: 'group', artifact_id: 'artifact', packaging: 'packaging', version: 'version' } identifiers = @manager.add_plugin_identifier_key('bar', 'bar_name', 'type', coordinate_map) - identifiers.size.should == 1 - identifiers['bar']['plugin_name'].should == 'bar_name' - identifiers['bar']['group_id'].should == 'group' - identifiers['bar']['artifact_id'].should == 'artifact' - identifiers['bar']['packaging'].should == 'packaging' - identifiers['bar']['classifier'].should == nil - identifiers['bar']['version'].should == 'version' + identifiers.size.should eq 1 + identifiers['bar']['plugin_name'].should eq 'bar_name' + identifiers['bar']['group_id'].should eq 'group' + identifiers['bar']['artifact_id'].should eq 'artifact' + identifiers['bar']['packaging'].should eq 'packaging' + identifiers['bar']['classifier'].should be_nil 
+ identifiers['bar']['version'].should eq 'version' end - it 'creates plugin identifier with multiple entries' do # Verifies file gets created if does not exist identifiers = @manager.add_plugin_identifier_key('foo', 'foo_name', 'type', nil) - identifiers.size.should == 1 - identifiers['foo']['plugin_name'].should == 'foo_name' + identifiers.size.should eq 1 + identifiers['foo']['plugin_name'].should eq 'foo_name' # Verify file was created from previous entry (prev value was read) identifiers = @manager.add_plugin_identifier_key('bar', 'bar_name', 'type', nil) - identifiers.size.should == 2 - identifiers['foo']['plugin_name'].should == 'foo_name' - identifiers['bar']['plugin_name'].should == 'bar_name' - + identifiers.size.should eq 2 + identifiers['foo']['plugin_name'].should eq 'foo_name' + identifiers['bar']['plugin_name'].should eq 'bar_name' # Verify file was created from previous entry (prev value was read) identifiers = @manager.add_plugin_identifier_key('zoe', 'zoe_name', 'type', nil) - identifiers.size.should == 3 - identifiers['bar']['plugin_name'].should == 'bar_name' - identifiers['foo']['plugin_name'].should == 'foo_name' - identifiers['zoe']['plugin_name'].should == 'zoe_name' + identifiers.size.should eq 3 + identifiers['bar']['plugin_name'].should eq 'bar_name' + identifiers['foo']['plugin_name'].should eq 'foo_name' + identifiers['zoe']['plugin_name'].should eq 'zoe_name' end it 'creates plugin identifiers with duplicate entries' do # Verifies file gets created if does not exist identifiers = @manager.add_plugin_identifier_key('kewl', 'kewl_name', 'type', nil) - identifiers.size.should == 1 - identifiers['kewl']['plugin_name'].should == 'kewl_name' + identifiers.size.should eq 1 + identifiers['kewl']['plugin_name'].should eq 'kewl_name' # Add with a different plugin_name identifiers = @manager.add_plugin_identifier_key('kewl', 'kewl_name2', 'type', nil) - identifiers.size.should == 1 - identifiers['kewl']['plugin_name'].should == 'kewl_name' + 
identifiers.size.should eq 1 + identifiers['kewl']['plugin_name'].should eq 'kewl_name' end - it 'creates plugin identifiers and remove entry' do # Verifies file gets created if does not exist identifiers = @manager.add_plugin_identifier_key('lol', 'lol_name', 'type', nil) - identifiers.size.should == 1 - identifiers['lol']['plugin_name'].should == 'lol_name' + identifiers.size.should eq 1 + identifiers['lol']['plugin_name'].should eq 'lol_name' # Remove wrong entry, nothing happens identifiers = @manager.remove_plugin_identifier_key('lol2') - identifiers.size.should == 1 - identifiers['lol']['plugin_name'].should == 'lol_name' + identifiers.size.should eq 1 + identifiers['lol']['plugin_name'].should eq 'lol_name' # Remove correct entry identifiers = @manager.remove_plugin_identifier_key('lol') - identifiers.size.should == 0 + identifiers.size.should eq 0 # Add same entry again identifiers = @manager.add_plugin_identifier_key('lol', 'lol_name', 'type', nil) - identifiers.size.should == 1 - identifiers['lol']['plugin_name'].should == 'lol_name' + identifiers.size.should eq 1 + identifiers['lol']['plugin_name'].should eq 'lol_name' end it 'creates plugin identifiers and validate entry' do # Verifies file gets created if does not exist - coordinate_map = {:group_id => 'group', :artifact_id => 'artifact', :packaging => 'packaging', :version => 'version'} + coordinate_map = { group_id: 'group', artifact_id: 'artifact', packaging: 'packaging', version: 'version' } - identifiers = @manager.add_plugin_identifier_key('yoyo', 'yoyo_name', 'type', coordinate_map) - identifiers.size.should == 1 - identifiers['yoyo']['plugin_name'].should == 'yoyo_name' + identifiers = @manager.add_plugin_identifier_key('yoyo', 'yoyo_name', 'type', coordinate_map) + identifiers.size.should eq 1 + identifiers['yoyo']['plugin_name'].should eq 'yoyo_name' - @manager.validate_plugin_identifier_key('yoyo', coordinate_map).should == true + @manager.validate_plugin_identifier_key('yoyo', 
coordinate_map).should eq true # Negative validation - invalid_coordinate_map = {:group_id => 'group1', :artifact_id => 'artifact', :packaging => 'packaging', :version => 'version'} + invalid_coordinate_map = { group_id: 'group1', artifact_id: 'artifact', packaging: 'packaging', version: 'version' } - @manager.validate_plugin_identifier_key('yoyo', invalid_coordinate_map).should == false + @manager.validate_plugin_identifier_key('yoyo', invalid_coordinate_map).should eq false end - it 'creates a plugin identifier entry with a new version' do # Verifies file gets created if does not exist - coordinate_map1 = {:group_id => 'group', :artifact_id => 'artifact', :packaging => 'packaging', :version => 'version1'} + coordinate_map1 = { group_id: 'group', artifact_id: 'artifact', packaging: 'packaging', version: 'version1' } identifiers = @manager.add_plugin_identifier_key('bar', 'bar_name', 'type', coordinate_map1) - identifiers.size.should == 1 - identifiers['bar']['plugin_name'].should == 'bar_name' - identifiers['bar']['version'].should == 'version1' + identifiers.size.should eq 1 + identifiers['bar']['plugin_name'].should eq 'bar_name' + identifiers['bar']['version'].should eq 'version1' - coordinate_map2 = {:group_id => 'group', :artifact_id => 'artifact', :packaging => 'packaging', :version => 'version2'} + coordinate_map2 = { group_id: 'group', artifact_id: 'artifact', packaging: 'packaging', version: 'version2' } identifiers = @manager.add_plugin_identifier_key('bar', 'bar_name', 'type', coordinate_map2) - identifiers.size.should == 1 - identifiers['bar']['plugin_name'].should == 'bar_name' - identifiers['bar']['version'].should == 'version2' - + identifiers.size.should eq 1 + identifiers['bar']['plugin_name'].should eq 'bar_name' + identifiers['bar']['version'].should eq 'version2' end it 'sets a path as active' do @manager.set_active(@plugin_dir.join('1.0.0')) - File.exists?(@plugin_dir.join('SET_DEFAULT')).should be_true - 
File.readlink(@plugin_dir.join('SET_DEFAULT')).should == @plugin_dir.join('1.0.0').to_s + File.exist?(@plugin_dir.join('SET_DEFAULT')).should be_true + File.readlink(@plugin_dir.join('SET_DEFAULT')).should eq @plugin_dir.join('1.0.0').to_s @manager.set_active(@plugin_dir.join('2.0.0')) - File.exists?(@plugin_dir.join('SET_DEFAULT')).should be_true - File.readlink(@plugin_dir.join('SET_DEFAULT')).should == @plugin_dir.join('2.0.0').to_s + File.exist?(@plugin_dir.join('SET_DEFAULT')).should be_true + File.readlink(@plugin_dir.join('SET_DEFAULT')).should eq @plugin_dir.join('2.0.0').to_s end it 'sets a plugin version as active' do @manager.set_active('killbill-stripe', '2.0.0') - File.exists?(@plugin_dir.join('SET_DEFAULT')).should be_true - File.readlink(@plugin_dir.join('SET_DEFAULT')).should == @plugin_dir.join('2.0.0').to_s + File.exist?(@plugin_dir.join('SET_DEFAULT')).should be_true + File.readlink(@plugin_dir.join('SET_DEFAULT')).should eq @plugin_dir.join('2.0.0').to_s @manager.set_active('killbill-stripe', '1.0.0') - File.exists?(@plugin_dir.join('SET_DEFAULT')).should be_true - File.readlink(@plugin_dir.join('SET_DEFAULT')).should == @plugin_dir.join('1.0.0').to_s + File.exist?(@plugin_dir.join('SET_DEFAULT')).should be_true + File.readlink(@plugin_dir.join('SET_DEFAULT')).should eq @plugin_dir.join('1.0.0').to_s end it 'uninstalls a plugin via a path' do @@ -196,17 +192,17 @@ it 'guesses the plugin name' do @manager.guess_plugin_name('tripe').should be_nil # Short name - @manager.guess_plugin_name('stripe').should == 'killbill-stripe' + @manager.guess_plugin_name('stripe').should eq 'killbill-stripe' # Artifact id - @manager.guess_plugin_name('stripe-plugin').should == 'killbill-stripe' + @manager.guess_plugin_name('stripe-plugin').should eq 'killbill-stripe' # Plugin name (top directory in the .tar.gz) - @manager.guess_plugin_name('killbill-stripe').should == 'killbill-stripe' + @manager.guess_plugin_name('killbill-stripe').should eq 'killbill-stripe' end 
private def check_state(version, has_restart, has_disabled) - File.exists?(@plugin_dir.join(version).join('tmp').join('restart.txt')).should == has_restart - File.exists?(@plugin_dir.join(version).join('tmp').join('disabled.txt')).should == has_disabled + File.exist?(@plugin_dir.join(version).join('tmp').join('restart.txt')).should eq has_restart + File.exist?(@plugin_dir.join(version).join('tmp').join('disabled.txt')).should eq has_disabled end end diff --git a/kpm/spec/kpm/unit/sha1_checker_spec.rb b/kpm/spec/kpm/unit/sha1_checker_spec.rb index 05ffceba..eb55a380 100644 --- a/kpm/spec/kpm/unit/sha1_checker_spec.rb +++ b/kpm/spec/kpm/unit/sha1_checker_spec.rb @@ -1,103 +1,150 @@ +# frozen_string_literal: true + require 'spec_helper' describe KPM::Sha1Checker do + let(:tmp_dir) { Dir.mktmpdir('sha1_checker_spec') } + let(:sha1_file) { File.join(tmp_dir, 'sha1.yml') } + let(:sha1_checker) { KPM::Sha1Checker.from_file(sha1_file) } + let(:sha1_content) do + { + 'sha1' => { 'org.kill-bill.billing.plugin.ruby:killbill-plugin-match:tar.gz:1.0.0' => 'fce068c3fd5f95646ce0d09852f43ff67f06f0b9', + 'org.kill-bill.billing.plugin.ruby:killbill-plugin-nomatch:tar.gz:1.0.0' => 'ace068c3fd5f95646ce0d09852f43ff67f06f0b8', + 'org.kill-bill.billing.plugin.ruby:killbill-plugin-other:tar.gz:1.0.0' => 'bbb068c3fd5f95646ce0d09852f43ff67f06fccc' }, + 'nexus' => { 'org.kill-bill.billing.plugin.ruby:killbill-plugin-match:tar.gz:1.0.0' => { :sha1 => 'fce068c3fd5f95646ce0d09852f43ff67f06f0b9', + :version => '1.0.0', + :repository_path => '/org/kill-bill/billing/plugin/ruby/killbill-plugin-match/1.0.0/killbill-plugin-match-1.0.0.tar.gz', + :is_tgz => true }, + 'org.kill-bill.billing.plugin.ruby:killbill-plugin-nomatch:tar.gz:1.0.0' => { :sha1 => 'ace068c3fd5f95646ce0d09852f43ff67f06f0b8', + :version => '1.0.0', + :repository_path => '/org/kill-bill/billing/plugin/ruby/killbill-plugin-nomatch/1.0.0/killbill-plugin-nomatch-1.0.0.tar.gz', + :is_tgz => true }, + 
'org.kill-bill.billing.plugin.ruby:killbill-plugin-other:tar.gz:1.0.0' => { :sha1 => 'bbb068c3fd5f95646ce0d09852f43ff67f06fccc', + :version => '1.0.0', + :repository_path => '/org/kill-bill/billing/plugin/ruby/killbill-plugin-other/1.0.0/killbill-plugin-other-1.0.0.tar.gz', + :is_tgz => true } }, + 'killbill' => { '0.20.10' => { 'killbill' => '0.20.10', 'killbill-oss-parent' => '0.142.7', 'killbill-api' => '0.52.0', 'killbill-plugin-api' => '0.25.0', 'killbill-commons' => '0.22.3', 'killbill-platform' => '0.38.3' }, + '0.20.12' => { 'killbill' => '0.20.12', 'killbill-oss-parent' => '0.142.7', 'killbill-api' => '0.52.0', 'killbill-plugin-api' => '0.25.0', 'killbill-commons' => '0.22.3', 'killbill-platform' => '0.38.3' }, + '0.18.5' => { 'killbill' => '0.18.5', 'killbill-oss-parent' => '0.140.18', 'killbill-api' => '0.50.1', 'killbill-plugin-api' => '0.23.1', 'killbill-commons' => '0.20.5', 'killbill-platform' => '0.36.5' } } + } + end - before(:all) do - @logger = Logger.new(STDOUT) - @logger.level = Logger::INFO - - tmp_destination_dir = Dir.tmpdir() - init_config = File.join(File.dirname(__FILE__), 'sha1_test.yml') - FileUtils.copy(init_config, tmp_destination_dir) - @tmp_config = File.join(tmp_destination_dir, 'sha1_test.yml') - @sha1_checker = KPM::Sha1Checker.from_file(@tmp_config) + before do + File.open(sha1_file.to_s, 'w') { |l| l.puts(sha1_content.to_yaml) } end it 'should create intermediate directories' do Dir.mktmpdir do |dir| - config = File.join(dir, 'foo', 'bar', 'baz', 'sha1_test.yml') + config = File.join(dir, 'foo', 'bar', 'baz', 'sha1.yml') + expect(File.exist?(config)).to be_false KPM::Sha1Checker.from_file(config) + expect(File.exist?(config)).to be_true end end - it 'should find matching sha1' do - existing = @sha1_checker.sha1('killbill-plugin-match-1.0.0.tar.gz') - existing.should_not be_nil - existing.should == 'fce068c3fd5f95646ce0d09852f43ff67f06f0b9' + it 'translates LATEST when caching nexus info' do + 
sha1_checker.cache_artifact_info('org.kill-bill.billing.plugin.java:analytics-plugin:jar:LATEST', { :sha1 => '050594dd73a54d229ca3efcf69785345b8cd1681', + :version => '7.0.4', + :repository_path => '/org/kill-bill/billing/plugin/java/analytics-plugin/7.0.4/analytics-plugin-7.0.4.jar', + :is_tgz => false }) + expect(sha1_checker.artifact_info('org.kill-bill.billing.plugin.java:analytics-plugin:jar:LATEST')).to be_nil + expect(sha1_checker.artifact_info('org.kill-bill.billing.plugin.java:analytics-plugin:jar:7.0.4')[:sha1]).to eq('050594dd73a54d229ca3efcf69785345b8cd1681') end - it 'should NOT find sha1' do - existing = @sha1_checker.sha1('killbill-plugin-nomatch-1.0.0.tar.gz') - existing.should_not be_nil - existing.should_not == 'fce068c3fd5f95646ce0d09852f43ff67f06f0b9' + it 'never caches nexus info without version info' do + sha1_checker.cache_artifact_info('org.kill-bill.billing.plugin.java:analytics-plugin:jar:LATEST', { :sha1 => '050594dd73a54d229ca3efcf69785345b8cd1681', + :repository_path => '/org/kill-bill/billing/plugin/java/analytics-plugin/7.0.4/analytics-plugin-7.0.4.jar', + :is_tgz => false }) + expect(sha1_checker.artifact_info('org.kill-bill.billing.plugin.java:analytics-plugin:jar:LATEST')).to be_nil + expect(sha1_checker.artifact_info('org.kill-bill.billing.plugin.java:analytics-plugin:jar:7.0.4')).to be_nil end - it 'should NOT find matching sha1' do - existing = @sha1_checker.sha1('killbill-plugin-foo-1.0.0.tar.gz') - existing.should be_nil + it 'finds matching sha1' do + existing_sha1 = sha1_checker.sha1('org.kill-bill.billing.plugin.ruby:killbill-plugin-match:tar.gz:1.0.0') + expect(existing_sha1).to eq('fce068c3fd5f95646ce0d09852f43ff67f06f0b9') + + existing_nexus = sha1_checker.artifact_info('org.kill-bill.billing.plugin.ruby:killbill-plugin-match:tar.gz:1.0.0') + expect(existing_nexus[:sha1]).to eq(existing_sha1) + end + + it 'does not find matching sha1' do + existing_sha1 = sha1_checker.sha1('killbill-plugin-foo:tar.gz:1.0.0') + 
expect(existing_sha1).to be_nil + + existing_nexus = sha1_checker.artifact_info('killbill-plugin-foo:tar.gz:1.0.0') + expect(existing_nexus).to be_nil end - it 'should add an entry and find them all' do - @sha1_checker.add_or_modify_entry!('killbill-plugin-new-1.1.0.0.tar.gz', 'abc068c3fd5f95646ce0d09852f43ff67f06f111') + it 'adds an entry and find them all' do + sha1_checker.add_or_modify_entry!('killbill-plugin-new:tar.gz:1.1.0.0', 'abc068c3fd5f95646ce0d09852f43ff67f06f111') - existing = @sha1_checker.sha1('killbill-plugin-match-1.0.0.tar.gz') - existing.should_not be_nil - existing.should == 'fce068c3fd5f95646ce0d09852f43ff67f06f0b9' + existing = sha1_checker.sha1('org.kill-bill.billing.plugin.ruby:killbill-plugin-match:tar.gz:1.0.0') + expect(existing).to eq('fce068c3fd5f95646ce0d09852f43ff67f06f0b9') - existing = @sha1_checker.sha1('killbill-plugin-new-1.1.0.0.tar.gz') - existing.should_not be_nil - existing.should == 'abc068c3fd5f95646ce0d09852f43ff67f06f111' + # Nexus cache untouched + existing_nexus = sha1_checker.artifact_info('org.kill-bill.billing.plugin.ruby:killbill-plugin-match:tar.gz:1.0.0') + expect(existing_nexus[:sha1]).to eq(existing) + existing = sha1_checker.sha1('killbill-plugin-new:tar.gz:1.1.0.0') + expect(existing).to eq('abc068c3fd5f95646ce0d09852f43ff67f06f111') - existing = @sha1_checker.sha1('killbill-plugin-other-1.0.0.tar.gz') - existing.should_not be_nil - existing.should == 'bbb068c3fd5f95646ce0d09852f43ff67f06fccc' + # Nexus cache not updated + expect(sha1_checker.artifact_info('killbill-plugin-new:tar.gz:1.1.0.0')).to be_nil + + existing = sha1_checker.sha1('org.kill-bill.billing.plugin.ruby:killbill-plugin-other:tar.gz:1.0.0') + expect(existing).to eq('bbb068c3fd5f95646ce0d09852f43ff67f06fccc') + + # Nexus cache untouched + existing_nexus = sha1_checker.artifact_info('org.kill-bill.billing.plugin.ruby:killbill-plugin-other:tar.gz:1.0.0') + expect(existing_nexus[:sha1]).to eq(existing) end - it 'should add allow to modify an entry 
and find them all' do - existing = @sha1_checker.sha1('killbill-plugin-match-1.0.0.tar.gz') - existing.should_not be_nil - existing.should == 'fce068c3fd5f95646ce0d09852f43ff67f06f0b9' + it 'modifies an entry and find them all' do + existing = sha1_checker.sha1('org.kill-bill.billing.plugin.ruby:killbill-plugin-match:tar.gz:1.0.0') + expect(existing).to eq('fce068c3fd5f95646ce0d09852f43ff67f06f0b9') + + existing_nexus = sha1_checker.artifact_info('org.kill-bill.billing.plugin.ruby:killbill-plugin-match:tar.gz:1.0.0') + expect(existing_nexus[:sha1]).to eq(existing) + + sha1_checker.add_or_modify_entry!('org.kill-bill.billing.plugin.ruby:killbill-plugin-match:tar.gz:1.0.0', 'dde068c3fd5f95646ce0d09852f43ff67f06f0aa') - @sha1_checker.add_or_modify_entry!('killbill-plugin-match-1.0.0.tar.gz', 'dde068c3fd5f95646ce0d09852f43ff67f06f0aa') + existing = sha1_checker.sha1('org.kill-bill.billing.plugin.ruby:killbill-plugin-match:tar.gz:1.0.0') + expect(existing).to eq('dde068c3fd5f95646ce0d09852f43ff67f06f0aa') - existing = @sha1_checker.sha1('killbill-plugin-match-1.0.0.tar.gz') - existing.should_not be_nil - existing.should == 'dde068c3fd5f95646ce0d09852f43ff67f06f0aa' + # Nexus cache untouched (modified in another code path) + existing_nexus = sha1_checker.artifact_info('org.kill-bill.billing.plugin.ruby:killbill-plugin-match:tar.gz:1.0.0') + expect(existing_nexus[:sha1]).to eq('fce068c3fd5f95646ce0d09852f43ff67f06f0b9') - existing = @sha1_checker.sha1('killbill-plugin-other-1.0.0.tar.gz') - existing.should_not be_nil - existing.should == 'bbb068c3fd5f95646ce0d09852f43ff67f06fccc' + existing = sha1_checker.sha1('org.kill-bill.billing.plugin.ruby:killbill-plugin-other:tar.gz:1.0.0') + expect(existing).to eq('bbb068c3fd5f95646ce0d09852f43ff67f06fccc') + + existing_nexus = sha1_checker.artifact_info('org.kill-bill.billing.plugin.ruby:killbill-plugin-other:tar.gz:1.0.0') + expect(existing_nexus[:sha1]).to eq(existing) end context 'when removing an entry' do - let(:identifier) 
{ 'killbill-plugin-match-1.0.0.tar.gz' } + let(:identifier) { 'org.kill-bill.billing.plugin.ruby:killbill-plugin-match:tar.gz:1.0.0' } before do - @sha1_checker.remove_entry!(identifier) + sha1_checker.remove_entry!(identifier) end it 'does not find the entry' do - @sha1_checker.sha1(identifier).should be_nil + expect(sha1_checker.sha1(identifier)).to be_nil + expect(sha1_checker.artifact_info(identifier)).to be_nil end - let(:reloaded_checker) { KPM::Sha1Checker.from_file(@tmp_config) } it 'does not find entry in file system' do - reloaded_checker.sha1(identifier).should be_nil + expect(KPM::Sha1Checker.from_file(sha1_file).sha1(identifier)).to be_nil end end - it 'should work with empty config' do - tmp_destination_dir = Dir.tmpdir() - empty_config = File.join(tmp_destination_dir, 'sha1_test.yml') - if File.exists?(empty_config) - # Just to be sure - File.delete(empty_config) + it 'works with empty config' do + Dir.mktmpdir do |dir| + empty_config = File.join(dir, 'sha1.yml') + sha1_checker = KPM::Sha1Checker.from_file(empty_config) + sha1_checker.add_or_modify_entry!('killbill-plugin-new:tar.gz:1.1.0.0', 'abc068c3fd5f95646ce0d09852f43ff67f06f111') + existing = sha1_checker.sha1('killbill-plugin-new:tar.gz:1.1.0.0') + expect(existing).to eq('abc068c3fd5f95646ce0d09852f43ff67f06f111') end - @sha1_checker = KPM::Sha1Checker.from_file(empty_config) - - @sha1_checker.add_or_modify_entry!('killbill-plugin-new-1.1.0.0.tar.gz', 'abc068c3fd5f95646ce0d09852f43ff67f06f111') - existing = @sha1_checker.sha1('killbill-plugin-new-1.1.0.0.tar.gz') - existing.should_not be_nil - existing.should == 'abc068c3fd5f95646ce0d09852f43ff67f06f111' end end diff --git a/kpm/spec/kpm/unit/uninstaller_spec.rb b/kpm/spec/kpm/unit/uninstaller_spec.rb index a097b8e4..12381743 100644 --- a/kpm/spec/kpm/unit/uninstaller_spec.rb +++ b/kpm/spec/kpm/unit/uninstaller_spec.rb @@ -1,88 +1,101 @@ +# frozen_string_literal: true + require 'spec_helper' describe KPM::Uninstaller do - + let(:destination) { 
Dir.mktmpdir('uninstaller_spec') } let(:uninstaller) { KPM::Uninstaller.new(destination) } - let(:destination) { 'somedir' } let(:plugins_manager_mock) { double(KPM::PluginsManager) } let(:sha1_checker_mock) { double(KPM::Sha1Checker) } + + let(:plugin_info) do + { + plugin_name: plugin_name, + plugin_key: plugin_key, + plugin_path: plugin_path, + versions: [{ version: version1, :is_default => false, :is_disabled => false, :sha1 => nil }, { version: version2, :is_default => true, :is_disabled => false, :sha1 => nil }], + type: 'java', + group_id: 'group', + artifact_id: 'artifact', + packaging: 'jar', + classifier: nil + } + end + let(:plugin_name) { 'plugin-name' } + let(:plugin_key) { 'plugin-key' } + let(:plugin_path) { "#{destination}/plugins/java/#{plugin_name}" } + let(:version1) { '1.0' } + let(:version2) { '2.0' } + before do - KPM::Inspector.stub_chain(:new, :inspect).and_return(installed_plugins) KPM::PluginsManager.stub(:new).and_return(plugins_manager_mock) KPM::Sha1Checker.stub(:from_file).and_return(sha1_checker_mock) - end - - context 'when plugin is not installed' do - let(:installed_plugins) { {} } - it 'raises a plugin not found error' do - expect { - uninstaller.uninstall_plugin('adyen') - }.to raise_error(StandardError, "No plugin with key/name 'adyen' found installed. 
Try running 'kpm inspect' for more info") + # Calls by the Inspector + plugins_manager_mock.stub(:get_identifier_key_and_entry) do + [plugin_key, { 'group_id' => plugin_info[:group_id], + 'artifact_id' => plugin_info[:artifact_id], + 'packaging' => plugin_info[:packaging] }] end + sha1_checker_mock.stub(:all_sha1) { {} } end - context 'when plugin is installed' do - let(:installed_plugins) do - { - plugin_name => { - plugin_key: plugin_key, - plugin_path: plugin_path, - versions: [{version: version}], - group_id: 'group', - artifact_id: 'artifact', - packaging: 'jar' - } - } - end - - let(:plugin_name) { 'plugin-name' } - let(:plugin_key) { 'plugin-key' } - let(:plugin_path) { 'plugin-path' } - let(:version) { '1.0' } - - it 'uninstalls a plugin by name' do - FileUtils.should_receive(:rmtree).with(plugin_path) - plugins_manager_mock.should_receive(:remove_plugin_identifier_key).with(plugin_key) - sha1_checker_mock.should_receive(:remove_entry!).with("group:artifact:jar:#{version}") + context 'utility methods' do + it 'raises an error when directory to delete is invalid' do + expect do + uninstaller.send(:validate_dir_for_rmrf, '/home/john') + end.to raise_error(ArgumentError, 'Path /home/john is not a valid directory') + end + it 'raises an error when directory to delete is not safe' do + expect do + uninstaller.send(:validate_dir_for_rmrf, '/tmp') + end.to raise_error(ArgumentError, "Path /tmp is not a subdirectory of #{destination}") + end + end - uninstaller.uninstall_plugin(plugin_name).should be_true + context 'when no plugin is installed' do + it 'raises an error when uninstalling a plugin' do + expect do + uninstaller.uninstall_plugin(plugin_name) + end.to raise_error(StandardError, "No plugin with key/name '#{plugin_name}' found installed. 
Try running 'kpm inspect' for more info") end - it 'uninstalls a plugin by key' do - FileUtils.should_receive(:rmtree).with(plugin_path) - plugins_manager_mock.should_receive(:remove_plugin_identifier_key).with(plugin_key) - sha1_checker_mock.should_receive(:remove_entry!).with("group:artifact:jar:#{version}") + it 'raises an internal error when uninstalling a plugin' do + expect do + uninstaller.send(:remove_plugin_versions, plugin_info, true, [version1, version2]) + end.to raise_error(ArgumentError, "Path #{plugin_info[:plugin_path]}/#{version1} is not a valid directory") + end - uninstaller.uninstall_plugin(plugin_key).should be_true + it 'raises an error when uninstalling a plugin version' do + expect do + uninstaller.send(:remove_plugin_version, plugin_info, '3.0') + end.to raise_error(ArgumentError, "Path #{plugin_info[:plugin_path]}/3.0 is not a valid directory") end + + it { expect(uninstaller.send(:categorize_plugins)).to eq({ to_be_deleted: [], to_keep: [] }) } end context 'when plugin is installed' do - let(:installed_plugins) do - { - plugin_name => { - plugin_key: plugin_key, - plugin_path: plugin_path, - versions: [{version: version1},{version: version2}], - group_id: 'group', - artifact_id: 'artifact', - packaging: 'jar' - } - } - end - - let(:plugin_name) { 'plugin-name' } - let(:plugin_key) { 'plugin-key' } - let(:plugin_path) { 'plugin-path' } - let(:version1) { '1.0' } - let(:version2) { '2.0' } + before do + FileUtils.mkdir_p(plugin_version1_path) + FileUtils.mkdir_p(plugin_version2_path) + FileUtils.ln_s(plugin_version2_path, Pathname.new(plugin_path).join('SET_DEFAULT')) + end + let(:plugin_version1_path) { File.join(plugin_path, version1) } + let(:plugin_version2_path) { File.join(plugin_path, version2) } + + it 'looks up a plugin by name' do + expect(uninstaller.send(:find_plugin, plugin_name)).to eq(plugin_info) + end + + it 'looks up a plugin by key' do + expect(uninstaller.send(:find_plugin, plugin_key)).to eq(plugin_info) + end it 
'uninstalls if user confirms action' do KPM.ui.should_receive(:ask).and_return('y') - FileUtils.should_receive(:rmtree).with(plugin_path) plugins_manager_mock.should_receive(:remove_plugin_identifier_key).with(plugin_key) sha1_checker_mock.should_receive(:remove_entry!).with("group:artifact:jar:#{version1}") sha1_checker_mock.should_receive(:remove_entry!).with("group:artifact:jar:#{version2}") @@ -96,13 +109,46 @@ uninstaller.uninstall_plugin(plugin_name).should be_false end - it 'uninstalls without confirmation if the force option is given' do - FileUtils.should_receive(:rmtree).with(plugin_path) + it 'uninstalls all plugins without confirmation if the force option is given' do plugins_manager_mock.should_receive(:remove_plugin_identifier_key).with(plugin_key) sha1_checker_mock.should_receive(:remove_entry!).with("group:artifact:jar:#{version1}") sha1_checker_mock.should_receive(:remove_entry!).with("group:artifact:jar:#{version2}") uninstaller.uninstall_plugin(plugin_name, true).should be_true end + + it 'uninstalls one version without confirmation if the force option is given' do + sha1_checker_mock.should_receive(:remove_entry!).with("group:artifact:jar:#{version1}") + + uninstaller.uninstall_plugin(plugin_name, true, version1).should be_true + end + + it 'raises an error when uninstalling a version that does not exist' do + expect do + uninstaller.uninstall_plugin(plugin_name, true, '3.0') + end.to raise_error(ArgumentError) + end + + it 'categorizes plugins depending on default flag' do + expect(uninstaller.send(:categorize_plugins)).to eq({ to_be_deleted: [[plugin_info, version1]], to_keep: [[plugin_info, version2]] }) + end + + it 'does not cleanup if dry-run is set' do + expect(uninstaller.uninstall_non_default_plugins(true)).to be_false + end + + it 'does cleanup if dry-run isn\'t set' do + sha1_checker_mock.should_receive(:remove_entry!).with("group:artifact:jar:#{version1}") + + plugin_info_copy = Marshal.load(Marshal.dump(plugin_info)) + 
expect(KPM::Inspector.new.inspect(destination)).to eq({ plugin_name => plugin_info_copy }) + + expect(uninstaller.uninstall_non_default_plugins(false)).to be_true + plugin_info_copy[:versions].delete_at(0) + expect(KPM::Inspector.new.inspect(destination)).to eq({ plugin_name => plugin_info_copy }) + + # Second time is a no-op + expect(uninstaller.uninstall_non_default_plugins).to be_false + end end end diff --git a/kpm/spec/kpm/unit_mysql/account_spec.rb b/kpm/spec/kpm/unit_mysql/account_spec.rb index 55d4c88b..52bbed8a 100644 --- a/kpm/spec/kpm/unit_mysql/account_spec.rb +++ b/kpm/spec/kpm/unit_mysql/account_spec.rb @@ -1,35 +1,37 @@ +# frozen_string_literal: true + require 'spec_helper' describe KPM::Account do - shared_context 'account' do include_context 'connection_setup' - let(:account_class) { described_class.new(nil,[killbill_api_key,killbill_api_secrets], - [killbill_user, killbill_password],url, - db_name, [db_username, db_password],db_host,db_port,nil,logger)} - let(:dummy_account_id) {SecureRandom.uuid} - let(:account_id_invalid) {SecureRandom.uuid} - let(:dummy_data) { + let(:account_class) do + described_class.new(nil, [killbill_api_key, killbill_api_secret], + [killbill_user, killbill_password], url, + db_name, [db_username, db_password], db_host, db_port, nil, logger) + end + let(:dummy_account_id) { SecureRandom.uuid } + let(:account_id_invalid) { SecureRandom.uuid } + let(:account_id) { creating_account_with_client } + let(:dummy_data) do "-- accounts record_id|id|external_key|email|name|first_name_length|currency|billing_cycle_day_local|parent_account_id|is_payment_delegated_to_parent|payment_method_id|time_zone|locale|address1|address2|company_name|city|state_or_province|country|postal_code|phone|notes|migrated|is_notified_for_invoices|created_date|created_by|updated_date|updated_by|tenant_record_id\n"\ - "5|#{dummy_account_id}|#{dummy_account_id}|willharnet@example.com|Will 
Harnet||USD|0||||UTC||||||||||||false|2017-04-03T15:50:14.000+0000|demo|2017-04-05T15:01:39.000+0000|Killbill::Stripe::PaymentPlugin|2\n"\ + "5|#{dummy_account_id}|#{dummy_account_id}|willharnet@example.com|Will Harnet||USD|0||||UTC||||Company\\N{VERTICAL LINE}\\N{LINE FEED}Name||||||||false|2017-04-03T15:50:14.000+0000|demo|2017-04-05T15:01:39.000+0000|Killbill::Stripe::PaymentPlugin|2\n"\ "-- account_history record_id|id|target_record_id|external_key|email|name|first_name_length|currency|billing_cycle_day_local|parent_account_id|payment_method_id|is_payment_delegated_to_parent|time_zone|locale|address1|address2|company_name|city|state_or_province|country|postal_code|phone|notes|migrated|is_notified_for_invoices|change_type|created_by|created_date|updated_by|updated_date|tenant_record_id\n"\ - "3|#{SecureRandom.uuid}|5|#{dummy_account_id}|willharnet@example.com|Will Harnet||USD|0||||UTC||||||||||||false|INSERT|demo|2017-04-03T15:50:14.000+0000|demo|2017-04-03T15:50:14.000+0000|2\n" - } - let(:cols_names) {dummy_data.split("\n")[0].split(" ")[2]} - let(:cols_data) {dummy_data.split("\n")[1]} - let(:table_name) {dummy_data.split("\n")[0].split(" ")[1]} - let(:obfuscating_marker) {:email} - let(:mysql_cli) {"mysql --user=#{db_username} --password=#{db_password} --host=#{db_host} --port=#{db_port} "} - let(:test_ddl) {Dir["#{Dir.pwd}/**/account_test_ddl.sql"][0]} - + "3|#{SecureRandom.uuid}|5|#{dummy_account_id}|willharnet@example.com|Will Harnet||USD|0||||UTC||||Company\\N{VERTICAL LINE}\\N{LINE FEED}Name||||||||false|INSERT|demo|2017-04-03T15:50:14.000+0000|demo|2017-04-03T15:50:14.000+0000|2\n" + end + let(:cols_names) { dummy_data.split("\n")[0].split(' ')[2] } + let(:cols_data) { dummy_data.split("\n")[1] } + let(:table_name) { dummy_data.split("\n")[0].split(' ')[1] } + let(:obfuscating_marker) { :email } + let(:mysql_cli) { "mysql --user=#{db_username} --password=#{db_password} --host=#{db_host} --port=#{db_port} " } + let(:test_ddl) { 
Dir["#{Dir.pwd}/**/account_test_ddl.sql"][0] } end describe '#initialize' do - include_context 'account' + include_context 'account' context 'when creating an instance of account class' do - it 'when initialized with defaults' do expect(described_class.new).to be_an_instance_of(KPM::Account) end @@ -37,15 +39,12 @@ it 'when initialized with options' do account_class.should be_an_instance_of(KPM::Account) expect(account_class.instance_variable_get(:@killbill_api_key)).to eq(killbill_api_key) - expect(account_class.instance_variable_get(:@killbill_api_secrets)).to eq(killbill_api_secrets) + expect(account_class.instance_variable_get(:@killbill_api_secret)).to eq(killbill_api_secret) expect(account_class.instance_variable_get(:@killbill_user)).to eq(killbill_user) expect(account_class.instance_variable_get(:@killbill_password)).to eq(killbill_password) expect(account_class.instance_variable_get(:@killbill_url)).to eq(url) - end - end - end # export data tests @@ -53,46 +52,38 @@ include_context 'account' context 'when fetching account from api' do - it 'when account id not found' do - expect{ account_class.send(:fetch_export_data, account_id_invalid) }.to raise_error(Interrupt, 'Account id not found') + expect { account_class.send(:fetch_export_data, account_id_invalid) }.to raise_error(Interrupt, 'Account id not found') end it 'when account id found' do - account_id = creating_account_with_client expect(account_id).to match(/\w{8}(-\w{4}){3}-\w{12}?/) - expect{ account_class.send(:fetch_export_data, account_id) }.not_to raise_error(Interrupt, 'Account id not found') + expect { account_class.send(:fetch_export_data, account_id) }.not_to raise_error(Interrupt, 'Account id not found') expect(account_class.send(:fetch_export_data, account_id)).to match(account_id) end - end - end describe '#process_export_data' do include_context 'account' context 'when processing data to export' do - it 'when column name qty eq column data qty' do - 
expect(account_class.send(:process_export_data, cols_data, table_name, cols_names.split("|")).split("|").size).to eq(cols_names.split("|").size) + expect(account_class.send(:process_export_data, cols_data, table_name, cols_names.split('|')).split('|').size).to eq(cols_names.split('|').size) end it 'when obfuscating data' do marker_index = 0 - cols_names.split("|").each do |col_name| - if col_name.equal?(obfuscating_marker.to_s) - break - end + cols_names.split('|').each do |col_name| + break if col_name.equal?(obfuscating_marker.to_s) + marker_index += 1 end - obfuscating_marker_data = account_class.send(:process_export_data, cols_data, table_name, cols_names.split("|")).split("|") + obfuscating_marker_data = account_class.send(:process_export_data, cols_data, table_name, cols_names.split('|')).split('|') expect(obfuscating_marker_data[marker_index]).to be_nil end - end - end describe '#remove_export_data' do @@ -101,14 +92,12 @@ it 'when obfuscating value' do expect(account_class.send(:remove_export_data, table_name, obfuscating_marker.to_s, 'willharnet@example.com')).to be_nil end - end describe '#export' do include_context 'account' context 'when exporting data' do - it 'when file created' do expect(File.exist?(account_class.send(:export, dummy_data))).to be_true end @@ -117,35 +106,28 @@ expect(File.readlines(account_class.send(:export, dummy_data)).grep(/#{table_name}/)).to be_true expect(File.readlines(account_class.send(:export, dummy_data)).grep(/#{cols_names}/)).to be_true end - end - end describe '#export_data' do include_context 'account' context 'when exporting data; main method' do - it 'when no account id' do - expect{ account_class.export_data }.to raise_error(Interrupt, 'Account id not found') + expect { account_class.export_data }.to raise_error(Interrupt, 'Account id not found') end it 'when file created' do - account_id = creating_account_with_client expect(account_id).to match(/\w{8}(-\w{4}){3}-\w{12}?/) 
expect(File.exist?(account_class.export_data(account_id))).to be_true end it 'when file contains account record' do - account_id = creating_account_with_client expect(account_id).to match(/\w{8}(-\w{4}){3}-\w{12}?/) expect(File.readlines(account_class.export_data(account_id)).grep(/#{table_name}/)).to be_true expect(File.readlines(account_class.export_data(account_id)).grep(/#{cols_names}/)).to be_true end - end - end # import data tests @@ -153,14 +135,14 @@ include_context 'account' it 'when data delimiter is sniffed as "|"' do - open (dummy_data_file), 'w' do |io| - io.puts(dummy_data) + File.open(dummy_data_file, 'w') do |io| + io.puts(dummy_data) end - + expect(account_class.send(:sniff_delimiter, dummy_data_file)).to eq('|') end end - + describe '#fill_empty_column' do include_context 'account' @@ -173,7 +155,7 @@ include_context 'account' it 'when valid date value' do - expect{DateTime.parse(account_class.send(:fix_dates, '2017-04-05T15:01:39.000+0000'))}.not_to raise_error(ArgumentError) + expect { DateTime.parse(account_class.send(:fix_dates, '2017-04-05T15:01:39.000+0000')) }.not_to raise_error(ArgumentError) end it 'when valid date value match YYYY-MM-DD HH:MM:SS' do @@ -181,7 +163,7 @@ end it 'when invalid date value' do - expect{DateTime.parse(account_class.send(:fix_dates, 'JO'))}.to raise_error(ArgumentError) + expect { DateTime.parse(account_class.send(:fix_dates, 'JO')) }.to raise_error(ArgumentError) end end @@ -196,7 +178,6 @@ it 'when false' do expect(account_class.send(:replace_boolean, false)).to eq(0) end - end end @@ -222,7 +203,6 @@ it 'when field is search_key1 and table bus_events_history' do expect(account_class.send(:replace_account_record_id, 'bus_events_history', 'search_key1', '1')).to eq(:@account_record_id) end - end describe '#replace_tenant_record_id' do @@ -242,7 +222,6 @@ account_class.instance_variable_set(:@tenant_record_id, 10) expect(account_class.send(:replace_tenant_record_id, 'bus_events_history', 'search_key2', '1')).to 
eq(10) end - end describe '#replace_uuid' do @@ -259,19 +238,39 @@ expect(account_class.send(:replace_uuid, table_name, 'other_id', dummy_account_id)).to eq(dummy_account_id) end end - end describe '#sanitize' do include_context 'account' it 'when skip payment method' do - expect(account_class.send(:sanitize, 'payment_methods', 'plugin_name', 'Payment Method',true)).to eq('__EXTERNAL_PAYMENT__') + expect(account_class.send(:sanitize, 'payment_methods', 'plugin_name', 'Payment Method', true)).to eq('__EXTERNAL_PAYMENT__') end it 'when nothing to sanitize' do - expect(account_class.send(:sanitize, table_name, 'id', dummy_account_id,false)).to eq(dummy_account_id) + expect(account_class.send(:sanitize, table_name, 'id', dummy_account_id, false)).to eq(dummy_account_id) end + end + + describe '#sanitize_for_b64_date' do + include_context 'account' + let(:non_encoded_b64) do + # This is my test data + 'This is my test data' + end + + let(:encoded_b64) do + # This is my test data + 'VGhpcyBpcyBteSB0ZXN0IGRhdGE=' + end + + it 'when b64 encoded data' do + expect(account_class.send(:b64_decode_if_needed, encoded_b64).value).to start_with('LOAD_FILE("') + end + + it 'when b64 non encoded data' do + expect(account_class.send(:b64_decode_if_needed, non_encoded_b64)).to eq(non_encoded_b64) + end end describe '#process_import_data' do @@ -279,137 +278,139 @@ context 'when processing data to import' do it 'when column name qty eq column data qty without record_id' do - account_class.instance_variable_set(:@generate_record_id,true) - expect(account_class.send(:process_import_data, cols_data, table_name, cols_names.split('|'), false, []).size).to eq(cols_names.split("|").size-1) + account_class.instance_variable_set(:@generate_record_id, true) + expect(account_class.send(:process_import_data, cols_data, table_name, cols_names.split('|'), false, []).size).to eq(cols_names.split('|').size - 1) end end - end describe '#import_data' do include_context 'account' context 'when data to 
import; main import method' do - it 'when creating test schema' do db = create_test_schema expect(db).to eq(db_name) end - + it 'when importing data with empty file' do File.new(dummy_data_file, 'w+').close - expect{account_class.import_data(dummy_data_file,nil,true,false,true) }.to raise_error(Interrupt,"Data on #{dummy_data_file} is invalid") + expect { account_class.import_data(dummy_data_file, nil, true, false, true) }.to raise_error(Interrupt, "Data on #{dummy_data_file} is invalid") File.delete(dummy_data_file) end - + it 'when importing data with no file' do - expect{account_class.import_data(dummy_data_file,nil,true,false,true) }.to raise_error(Interrupt,'Need to specify a valid file') + expect { account_class.import_data(dummy_data_file, nil, true, false, true) }.to raise_error(Interrupt, "File #{dummy_data_file} does not exist") end - + it 'when importing data with new record_id' do - open (dummy_data_file), 'w' do |io| + File.open(dummy_data_file, 'w') do |io| io.puts(dummy_data) end - expect{account_class.import_data(dummy_data_file,nil,true,false,true) }.not_to raise_error(Interrupt) + expect { account_class.import_data(dummy_data_file, nil, true, false, true) }.not_to raise_error(Interrupt) + + verify_data(dummy_account_id) - row_count_inserted = delete_statement('accounts','id',dummy_account_id) + row_count_inserted = delete_statement('accounts', 'id', dummy_account_id) expect(row_count_inserted).to eq('1') - row_count_inserted = delete_statement('account_history','external_key',dummy_account_id) + row_count_inserted = delete_statement('account_history', 'external_key', dummy_account_id) expect(row_count_inserted).to eq('1') end it 'when importing data reusing record_id' do - open (dummy_data_file), 'w' do |io| + File.open(dummy_data_file, 'w') do |io| io.puts(dummy_data) end - expect{account_class.import_data(dummy_data_file,nil,true,false,false) }.not_to raise_error(Interrupt) + expect { account_class.import_data(dummy_data_file, nil, true, false, 
false) }.not_to raise_error(Interrupt) - row_count_inserted = delete_statement('accounts','id',dummy_account_id) + verify_data(dummy_account_id) + + row_count_inserted = delete_statement('accounts', 'id', dummy_account_id) expect(row_count_inserted).to eq('1') - row_count_inserted = delete_statement('account_history','external_key',dummy_account_id) + row_count_inserted = delete_statement('account_history', 'external_key', dummy_account_id) expect(row_count_inserted).to eq('1') end it 'when importing data with different tenant_record_id' do - open (dummy_data_file), 'w' do |io| + File.open(dummy_data_file, 'w') do |io| io.puts(dummy_data) end - expect{account_class.import_data(dummy_data_file,10,true,false,true) }.not_to raise_error(Interrupt) + expect { account_class.import_data(dummy_data_file, 10, true, false, true) }.not_to raise_error(Interrupt) + + verify_data(dummy_account_id) - row_count_inserted = delete_statement('accounts','id',dummy_account_id) + row_count_inserted = delete_statement('accounts', 'id', dummy_account_id) expect(row_count_inserted).to eq('1') - row_count_inserted = delete_statement('account_history','external_key',dummy_account_id) + row_count_inserted = delete_statement('account_history', 'external_key', dummy_account_id) expect(row_count_inserted).to eq('1') end it 'when round trip' do - open (dummy_data_file), 'w' do |io| + File.open(dummy_data_file, 'w') do |io| io.puts(dummy_data) end - expect{account_class.import_data(dummy_data_file,10,true,true,true) }.not_to raise_error(Interrupt) + expect { account_class.import_data(dummy_data_file, 10, true, true, true) }.not_to raise_error(Interrupt) new_account_id = account_class.instance_variable_get(:@tables_id) - row_count_inserted = delete_statement('accounts','id',new_account_id['accounts_id']) + verify_data(new_account_id['accounts_id']) + + row_count_inserted = delete_statement('accounts', 'id', new_account_id['accounts_id']) expect(row_count_inserted).to eq('1') - row_count_inserted = 
delete_statement('account_history','external_key',new_account_id['accounts_id']) + row_count_inserted = delete_statement('account_history', 'external_key', new_account_id['accounts_id']) expect(row_count_inserted).to eq('1') end - + it 'when droping test schema' do response = drop_test_schema expect(response).to match('') end - end - end - - private - def creating_account_with_client - if $account_id.nil? - KillBillClient.url = url - - options = { - :username => killbill_user, - :password => killbill_password, - :api_key => killbill_api_key, - :api_secret => killbill_api_secrets - } - - account = KillBillClient::Model::Account.new - account.name = 'KPM Account Test' - account.first_name_length = 3 - account.external_key = SecureRandom.uuid - account.currency = 'USD' - account = account.create('kpm_account_test', 'kpm_account_test', 'kpm_account_test', options) - - $account_id = account.account_id - end - - $account_id - end - - def delete_statement(table_name,column_name,account_id) - response = `#{mysql_cli} #{db_name} -e "DELETE FROM #{table_name} WHERE #{column_name} = '#{account_id}'; SELECT ROW_COUNT();" 2>&1` - response_msg = response.split("\n") - row_count_inserted = response_msg[response_msg.size - 1] - - row_count_inserted - end - - def create_test_schema - response = `#{mysql_cli} -e "CREATE DATABASE IF NOT EXISTS #{db_name};"` - response = `#{mysql_cli} #{db_name} < "#{test_ddl}" 2>&1` - response_msg = response.split("\n") - used_database = response_msg[response_msg.size - 1] - - used_database - end - - def drop_test_schema - response = `#{mysql_cli} -e "DROP DATABASE #{db_name};"`; - response - end + private + + def creating_account_with_client + KillBillClient.url = url + + options = { + username: killbill_user, + password: killbill_password, + api_key: killbill_api_key, + api_secret: killbill_api_secret + } + + account = KillBillClient::Model::Account.new + account.name = 'KPM Account Test' + account.first_name_length = 3 + account.external_key = 
SecureRandom.uuid + account.currency = 'USD' + account = account.create('kpm_account_test', 'kpm_account_test', 'kpm_account_test', options) + + account.account_id + end -end \ No newline at end of file + def verify_data(account_id) + response = `#{mysql_cli} #{db_name} -e "select company_name FROM accounts WHERE id = '#{account_id}';" 2>&1` + response_msg = response.split("\n") + company_name = response_msg[response_msg.size - 1] + + expect(company_name).to eq('Company|\\nName') + end + + def delete_statement(table_name, column_name, account_id) + response = `#{mysql_cli} #{db_name} -e "DELETE FROM #{table_name} WHERE #{column_name} = '#{account_id}'; SELECT ROW_COUNT();" 2>&1` + response_msg = response.split("\n") + response_msg[response_msg.size - 1] + end + + def create_test_schema + `#{mysql_cli} -e "CREATE DATABASE IF NOT EXISTS #{db_name};"` + response = `#{mysql_cli} #{db_name} < "#{test_ddl}" 2>&1` + response_msg = response.split("\n") + response_msg[response_msg.size - 1] + end + + def drop_test_schema + `#{mysql_cli} -e "DROP DATABASE #{db_name};"` + end +end diff --git a/kpm/spec/spec_helper.rb b/kpm/spec/spec_helper.rb index a187e8b6..c7f85a6d 100644 --- a/kpm/spec/spec_helper.rb +++ b/kpm/spec/spec_helper.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require 'tmpdir' require 'thor' require 'kpm' @@ -11,25 +13,25 @@ config.color_enabled = true config.tty = true config.formatter = 'documentation' - config.filter_run_excluding :skip_me_if_nil => true + config.filter_run_excluding skip_me_if_nil: true end shared_context 'connection_setup' do let(:logger) do - logger = ::Logger.new(STDOUT) - logger.level = Logger::FATAL - logger + logger = ::Logger.new(STDOUT) + logger.level = Logger::FATAL + logger end - let(:yml_file) {YAML::load_file(Dir["#{Dir.pwd}/**/account_spec.yml"][0])} - let(:dummy_data_file) {Dir.mktmpdir('dummy') + File::SEPARATOR + 'kbdump'} - let(:url) {"http://#{yml_file['killbill']['host']}:#{yml_file['killbill']['port']}"} - 
let(:killbill_api_key) {yml_file['killbill']['api_key']} - let(:killbill_api_secrets) {yml_file['killbill']['api_secret']} - let(:killbill_user) {yml_file['killbill']['user']} - let(:killbill_password) {yml_file['killbill']['password']} - let(:db_name) {yml_file['database']['name']} - let(:db_username) {yml_file['database']['user']} - let(:db_password) {yml_file['database']['password']} - let(:db_host) {yml_file['database']['host']} - let(:db_port) {yml_file['database']['port']} -end \ No newline at end of file + let(:yml_file) { YAML.load_file(Dir["#{Dir.pwd}/**/account_spec.yml"][0]) } + let(:dummy_data_file) { Dir.mktmpdir('dummy') + File::SEPARATOR + 'kbdump' } + let(:url) { "http://#{yml_file['killbill']['host']}:#{yml_file['killbill']['port']}" } + let(:killbill_api_key) { yml_file['killbill']['api_key'] } + let(:killbill_api_secret) { yml_file['killbill']['api_secret'] } + let(:killbill_user) { yml_file['killbill']['user'] } + let(:killbill_password) { yml_file['killbill']['password'] } + let(:db_name) { yml_file['database']['name'] } + let(:db_username) { yml_file['database']['user'] } + let(:db_password) { yml_file['database']['password'] } + let(:db_host) { yml_file['database']['host'] } + let(:db_port) { yml_file['database']['port'] } +end diff --git a/kpm/tasks/package.rake b/kpm/tasks/package.rake index 26486751..68b1265d 100644 --- a/kpm/tasks/package.rake +++ b/kpm/tasks/package.rake @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # For Bundler.with_clean_env require 'bundler/setup' require 'yaml' @@ -12,40 +14,38 @@ TRAVELING_RUBY_VERSION = '20150715-2.2.2' # Remove unused files to reduce package size GEMS_PATH = 'packaging/vendor/ruby/*/gems/*/' -REMOVE_FILES = %w(test tests spec README* CHANGE* Change* COPYING* LICENSE* MIT-LICENSE* doc docs examples ext/*/Makefile .gitignore .travis.yml) -REMOVE_EXTENSIONS = %w(*.md *.c *.h *.rl extconf.rb *.java *.class *.so *.o) +REMOVE_FILES = %w[test tests spec README* CHANGE* Change* COPYING* LICENSE* 
MIT-LICENSE* doc docs examples ext/*/Makefile .gitignore .travis.yml].freeze +REMOVE_EXTENSIONS = %w[*.md *.c *.h *.rl extconf.rb *.java *.class *.so *.o].freeze desc 'Package your app' -task :package => %w(package:linux:x86 package:linux:x86_64 package:osx) +task package: %w[package:linux:x86 package:linux:x86_64 package:osx] namespace :package do namespace :linux do desc 'Package KPM for Linux x86' - task :x86 => [:bundle_install, "packaging/traveling-ruby-#{TRAVELING_RUBY_VERSION}-linux-x86.tar.gz"] do + task x86: [:bundle_install, "packaging/traveling-ruby-#{TRAVELING_RUBY_VERSION}-linux-x86.tar.gz"] do create_package('linux-x86') end desc 'Package KPM for Linux x86_64' - task :x86_64 => [:bundle_install, "packaging/traveling-ruby-#{TRAVELING_RUBY_VERSION}-linux-x86_64.tar.gz"] do + task x86_64: [:bundle_install, "packaging/traveling-ruby-#{TRAVELING_RUBY_VERSION}-linux-x86_64.tar.gz"] do create_package('linux-x86_64') end end desc 'Package KPM for OS X' - task :osx => [:bundle_install, "packaging/traveling-ruby-#{TRAVELING_RUBY_VERSION}-osx.tar.gz"] do + task osx: [:bundle_install, "packaging/traveling-ruby-#{TRAVELING_RUBY_VERSION}-osx.tar.gz"] do create_package('osx') end desc 'Install gems to local directory' - task :bundle_install => [:clean] do + task bundle_install: [:clean] do # abort if version packaging does not exist on repository abort "KPM #{VERSION} does not exists in the repository." unless gem_exists? # Note! Must match TRAVELING_RUBY_VERSION above expected_ruby_version = TRAVELING_RUBY_VERSION.split('-')[-1] - if RUBY_VERSION !~ /#{Regexp.quote(expected_ruby_version)}/ - abort "You can only 'bundle install' using Ruby #{expected_ruby_version}, because that's what Traveling Ruby uses." - end + abort "You can only 'bundle install' using Ruby #{expected_ruby_version}, because that's what Traveling Ruby uses." 
if RUBY_VERSION !~ /#{Regexp.quote(expected_ruby_version)}/ sh 'rm -rf packaging/tmp' sh 'mkdir -p packaging/tmp' sh 'cp packaging/Gemfile packaging/tmp/' @@ -108,21 +108,21 @@ def create_package(target) sh "mkdir #{package_dir}/lib/vendor/.bundle" sh "cp packaging/bundler-config #{package_dir}/lib/vendor/.bundle/config" - if !ENV['DIR_ONLY'] - sh "tar -czf #{package_dir}.tar.gz #{package_dir}" - sh "rm -rf #{package_dir}" - end + return if ENV['DIR_ONLY'] + + sh "tar -czf #{package_dir}.tar.gz #{package_dir}" + sh "rm -rf #{package_dir}" end def download_runtime(target) - sh 'mkdir -p packaging && cd packaging && curl -L -O --fail ' + - "https://d6r77u77i8pq3.cloudfront.net/releases/traveling-ruby-#{TRAVELING_RUBY_VERSION}-#{target}.tar.gz" + sh 'mkdir -p packaging && cd packaging && curl -L -O --fail ' \ + "https://d6r77u77i8pq3.cloudfront.net/releases/traveling-ruby-#{TRAVELING_RUBY_VERSION}-#{target}.tar.gz" end def gem_exists? response = `gem specification 'kpm' -r -v #{VERSION} 2>&1` return false if response.nil? - specification = YAML::load(response) + specification = YAML.load(response) specification.instance_of?(Gem::Specification) -end \ No newline at end of file +end